diff -Nru dafny-1.9.5/Binaries/dafny dafny-1.9.7/Binaries/dafny --- dafny-1.9.5/Binaries/dafny 1970-01-01 00:00:00.000000000 +0000 +++ dafny-1.9.7/Binaries/dafny 2016-06-05 21:11:14.000000000 +0000 @@ -0,0 +1,16 @@ +#!/usr/bin/env bash + +MONO=$(which mono) +DAFNY=$(dirname "${BASH_SOURCE[0]}")/Dafny.exe + +if [[ ! -x "$MONO" ]]; then + echo "Error: Dafny requires Mono to run on non-Windows systems." + exit 1 +fi + +if [[ ! -x "$DAFNY" ]]; then + echo "Error: Dafny.exe not found at $DAFNY." + exit 1 +fi + +"$MONO" "$DAFNY" "$@" diff -Nru dafny-1.9.5/Binaries/DafnyPrelude.bpl dafny-1.9.7/Binaries/DafnyPrelude.bpl --- dafny-1.9.5/Binaries/DafnyPrelude.bpl 2015-05-11 08:03:26.000000000 +0000 +++ dafny-1.9.7/Binaries/DafnyPrelude.bpl 2016-06-05 21:11:14.000000000 +0000 @@ -20,6 +20,7 @@ const unique TNat : Ty; const unique TReal : Ty; function TSet(Ty) : Ty; +function TISet(Ty) : Ty; function TMultiSet(Ty) : Ty; function TSeq(Ty) : Ty; function TMap(Ty, Ty) : Ty; @@ -27,6 +28,8 @@ function Inv0_TSet(Ty) : Ty; axiom (forall t: Ty :: { TSet(t) } Inv0_TSet(TSet(t)) == t); +function Inv0_TISet(Ty) : Ty; +axiom (forall t: Ty :: { TISet(t) } Inv0_TISet(TISet(t)) == t); function Inv0_TSeq(Ty) : Ty; axiom (forall t: Ty :: { TSeq(t) } Inv0_TSeq(TSeq(t)) == t); function Inv0_TMultiSet(Ty) : Ty; @@ -52,6 +55,7 @@ const unique TagNat : TyTag; const unique TagReal : TyTag; const unique TagSet : TyTag; +const unique TagISet : TyTag; const unique TagMultiSet : TyTag; const unique TagSeq : TyTag; const unique TagMap : TyTag; @@ -64,6 +68,7 @@ axiom Tag(TNat) == TagNat; axiom Tag(TReal) == TagReal; axiom (forall t: Ty :: { TSet(t) } Tag(TSet(t)) == TagSet); +axiom (forall t: Ty :: { TISet(t) } Tag(TISet(t)) == TagISet); axiom (forall t: Ty :: { TMultiSet(t) } Tag(TMultiSet(t)) == TagMultiSet); axiom (forall t: Ty :: { TSeq(t) } Tag(TSeq(t)) == TagSeq); axiom (forall t, u: Ty :: { TMap(t,u) } Tag(TMap(t,u)) == TagMap); @@ -138,6 +143,9 @@ { $IsBox(bx, TSet(t)) } ( $IsBox(bx, TSet(t)) 
==> $Box($Unbox(bx) : Set Box) == bx && $Is($Unbox(bx) : Set Box, TSet(t)))); axiom (forall bx : Box, t : Ty :: + { $IsBox(bx, TISet(t)) } + ( $IsBox(bx, TISet(t)) ==> $Box($Unbox(bx) : ISet Box) == bx && $Is($Unbox(bx) : ISet Box, TISet(t)))); +axiom (forall bx : Box, t : Ty :: { $IsBox(bx, TMultiSet(t)) } ( $IsBox(bx, TMultiSet(t)) ==> $Box($Unbox(bx) : MultiSet Box) == bx && $Is($Unbox(bx) : MultiSet Box, TMultiSet(t)))); axiom (forall bx : Box, t : Ty :: @@ -187,6 +195,10 @@ $Is(v, TSet(t0)) <==> (forall bx: Box :: { v[bx] } v[bx] ==> $IsBox(bx, t0))); +axiom (forall v: ISet Box, t0: Ty :: { $Is(v, TISet(t0)) } + $Is(v, TISet(t0)) <==> + (forall bx: Box :: { v[bx] } + v[bx] ==> $IsBox(bx, t0))); axiom (forall v: MultiSet Box, t0: Ty :: { $Is(v, TMultiSet(t0)) } $Is(v, TMultiSet(t0)) <==> (forall bx: Box :: { v[bx] } @@ -202,6 +214,10 @@ $IsAlloc(v, TSet(t0), h) <==> (forall bx: Box :: { v[bx] } v[bx] ==> $IsAllocBox(bx, t0, h))); +axiom (forall v: ISet Box, t0: Ty, h: Heap :: { $IsAlloc(v, TISet(t0), h) } + $IsAlloc(v, TISet(t0), h) <==> + (forall bx: Box :: { v[bx] } + v[bx] ==> $IsAllocBox(bx, t0, h))); axiom (forall v: MultiSet Box, t0: Ty, h: Heap :: { $IsAlloc(v, TMultiSet(t0), h) } $IsAlloc(v, TMultiSet(t0), h) <==> (forall bx: Box :: { v[bx] } @@ -257,6 +273,8 @@ const unique class._System.seq: ClassName; const unique class._System.multiset: ClassName; +function Tclass._System.object(): Ty; + function /*{:never_pattern true}*/ dtype(ref): Ty; // changed from ClassName to Ty function TypeTuple(a: ClassName, b: ClassName): ClassName; @@ -271,6 +289,12 @@ type HandleType; +function SetRef_to_SetBox(s: [ref]bool): Set Box; +axiom (forall s: [ref]bool, bx: Box :: { SetRef_to_SetBox(s)[bx] } + SetRef_to_SetBox(s)[bx] == s[$Unbox(bx): ref]); +axiom (forall s: [ref]bool :: { SetRef_to_SetBox(s) } + $Is(SetRef_to_SetBox(s), TSet(Tclass._System.object()))); + // --------------------------------------------------------------- // -- Datatypes 
-------------------------------------------------- // --------------------------------------------------------------- @@ -382,7 +406,8 @@ function {:inline true} update(H:Heap, r:ref, f:Field alpha, v:alpha): Heap { H[r,f := v] } function $IsGoodHeap(Heap): bool; -var $Heap: Heap where $IsGoodHeap($Heap); +function $IsHeapAnchor(Heap): bool; +var $Heap: Heap where $IsGoodHeap($Heap) && $IsHeapAnchor($Heap); function $HeapSucc(Heap, Heap): bool; axiom (forall h: Heap, r: ref, f: Field alpha, x: alpha :: { update(h, r, f, x) } @@ -545,6 +570,81 @@ Set#Disjoint(a,b) <==> (forall o: T :: {a[o]} {b[o]} !a[o] || !b[o])); // --------------------------------------------------------------- +// -- Axiomatization of isets ------------------------------------- +// --------------------------------------------------------------- + +type ISet T = [T]bool; + +function ISet#Empty(): Set T; +axiom (forall o: T :: { ISet#Empty()[o] } !ISet#Empty()[o]); + +// the empty set could be of anything +//axiom (forall t: Ty :: { $Is(ISet#Empty() : [T]bool, TISet(t)) } $Is(ISet#Empty() : [T]bool, TISet(t))); + + +function ISet#UnionOne(ISet T, T): ISet T; +axiom (forall a: ISet T, x: T, o: T :: { ISet#UnionOne(a,x)[o] } + ISet#UnionOne(a,x)[o] <==> o == x || a[o]); +axiom (forall a: ISet T, x: T :: { ISet#UnionOne(a, x) } + ISet#UnionOne(a, x)[x]); +axiom (forall a: ISet T, x: T, y: T :: { ISet#UnionOne(a, x), a[y] } + a[y] ==> ISet#UnionOne(a, x)[y]); + +function ISet#Union(ISet T, ISet T): ISet T; +axiom (forall a: ISet T, b: ISet T, o: T :: { ISet#Union(a,b)[o] } + ISet#Union(a,b)[o] <==> a[o] || b[o]); +axiom (forall a, b: ISet T, y: T :: { ISet#Union(a, b), a[y] } + a[y] ==> ISet#Union(a, b)[y]); +axiom (forall a, b: Set T, y: T :: { ISet#Union(a, b), b[y] } + b[y] ==> ISet#Union(a, b)[y]); +axiom (forall a, b: ISet T :: { ISet#Union(a, b) } + ISet#Disjoint(a, b) ==> + ISet#Difference(ISet#Union(a, b), a) == b && + ISet#Difference(ISet#Union(a, b), b) == a); +// Follows from the general 
union axiom, but might be still worth including, because disjoint union is a common case: +// axiom (forall a, b: ISet T :: { ISet#Card(ISet#Union(a, b)) } +// ISet#Disjoint(a, b) ==> +// ISet#Card(ISet#Union(a, b)) == ISet#Card(a) + ISet#Card(b)); + +function ISet#Intersection(ISet T, ISet T): ISet T; +axiom (forall a: ISet T, b: ISet T, o: T :: { ISet#Intersection(a,b)[o] } + ISet#Intersection(a,b)[o] <==> a[o] && b[o]); + +axiom (forall a, b: ISet T :: { ISet#Union(ISet#Union(a, b), b) } + ISet#Union(ISet#Union(a, b), b) == ISet#Union(a, b)); +axiom (forall a, b: Set T :: { ISet#Union(a, ISet#Union(a, b)) } + ISet#Union(a, ISet#Union(a, b)) == ISet#Union(a, b)); +axiom (forall a, b: ISet T :: { ISet#Intersection(ISet#Intersection(a, b), b) } + ISet#Intersection(ISet#Intersection(a, b), b) == ISet#Intersection(a, b)); +axiom (forall a, b: ISet T :: { ISet#Intersection(a, ISet#Intersection(a, b)) } + ISet#Intersection(a, ISet#Intersection(a, b)) == ISet#Intersection(a, b)); + + +function ISet#Difference(ISet T, ISet T): ISet T; +axiom (forall a: ISet T, b: ISet T, o: T :: { ISet#Difference(a,b)[o] } + ISet#Difference(a,b)[o] <==> a[o] && !b[o]); +axiom (forall a, b: ISet T, y: T :: { ISet#Difference(a, b), b[y] } + b[y] ==> !ISet#Difference(a, b)[y] ); + +function ISet#Subset(ISet T, ISet T): bool; +axiom(forall a: ISet T, b: ISet T :: { ISet#Subset(a,b) } + ISet#Subset(a,b) <==> (forall o: T :: {a[o]} {b[o]} a[o] ==> b[o])); +// axiom(forall a: ISet T, b: ISet T :: +// { ISet#Subset(a,b), ISet#Card(a), ISet#Card(b) } // very restrictive trigger +// ISet#Subset(a,b) ==> ISet#Card(a) <= ISet#Card(b)); + + +function ISet#Equal(ISet T, ISet T): bool; +axiom(forall a: ISet T, b: ISet T :: { ISet#Equal(a,b) } + ISet#Equal(a,b) <==> (forall o: T :: {a[o]} {b[o]} a[o] <==> b[o])); +axiom(forall a: ISet T, b: ISet T :: { ISet#Equal(a,b) } // extensionality axiom for sets + ISet#Equal(a,b) ==> a == b); + +function ISet#Disjoint(ISet T, ISet T): bool; +axiom (forall a: ISet 
T, b: ISet T :: { ISet#Disjoint(a,b) } + ISet#Disjoint(a,b) <==> (forall o: T :: {a[o]} {b[o]} !a[o] || !b[o])); + +// --------------------------------------------------------------- // -- Axiomatization of multisets -------------------------------- // --------------------------------------------------------------- @@ -745,7 +845,7 @@ axiom (forall s: Seq T, x: T :: { Seq#Contains(s,x) } Seq#Contains(s,x) <==> (exists i: int :: { Seq#Index(s,i) } 0 <= i && i < Seq#Length(s) && Seq#Index(s,i) == x)); -axiom (forall x: ref :: +axiom (forall x: T :: { Seq#Contains(Seq#Empty(), x) } !Seq#Contains(Seq#Empty(), x)); diff -Nru dafny-1.9.5/Binaries/DafnyRuntime.cs dafny-1.9.7/Binaries/DafnyRuntime.cs --- dafny-1.9.5/Binaries/DafnyRuntime.cs 2015-05-11 08:03:26.000000000 +0000 +++ dafny-1.9.7/Binaries/DafnyRuntime.cs 2016-06-05 21:11:14.000000000 +0000 @@ -1,914 +1,1324 @@ -using System; // for Func -using System.Numerics; - -namespace Dafny -{ - using System.Collections.Generic; - - public class Set - { - Dictionary dict; - Set(Dictionary d) { - dict = d; - } - public static Set Empty { - get { - return new Set(new Dictionary(0)); - } - } - public static Set FromElements(params T[] values) { - Dictionary d = new Dictionary(values.Length); - foreach (T t in values) - d[t] = true; - return new Set(d); - } - public static Set FromCollection(ICollection values) { - Dictionary d = new Dictionary(); - foreach (T t in values) - d[t] = true; - return new Set(d); - } - public int Length { - get { return dict.Count; } - } - public long LongLength { - get { return dict.Count; } - } - public IEnumerable Elements { - get { - return dict.Keys; - } - } - /// - /// This is an inefficient iterator for producing all subsets of "this". Each set returned is the same - /// Set object (but this Set object is fresh; in particular, it is not "this"). 
- /// - public IEnumerable> AllSubsets { - get { - // Start by putting all set elements into a list - var elmts = new List(); - elmts.AddRange(dict.Keys); - var n = elmts.Count; - var which = new bool[n]; - var s = new Set(new Dictionary(0)); - while (true) { - yield return s; - // "add 1" to "which", as if doing a carry chain. For every digit changed, change the membership of the corresponding element in "s". - int i = 0; - for (; i < n && which[i]; i++) { - which[i] = false; - s.dict.Remove(elmts[i]); - } - if (i == n) { - // we have cycled through all the subsets - break; - } - which[i] = true; - s.dict.Add(elmts[i], true); - } - } - } - public bool Equals(Set other) { - return dict.Count == other.dict.Count && IsSubsetOf(other); - } - public override bool Equals(object other) { - return other is Set && Equals((Set)other); - } - public override int GetHashCode() { - var hashCode = 1; - foreach (var t in dict.Keys) { - hashCode = hashCode * (t.GetHashCode()+3); - } - return hashCode; - } - public override string ToString() { - var s = "{"; - var sep = ""; - foreach (var t in dict.Keys) { - s += sep + t.ToString(); - sep = ", "; - } - return s + "}"; - } - public bool IsProperSubsetOf(Set other) { - return dict.Count < other.dict.Count && IsSubsetOf(other); - } - public bool IsSubsetOf(Set other) { - if (other.dict.Count < dict.Count) - return false; - foreach (T t in dict.Keys) { - if (!other.dict.ContainsKey(t)) - return false; - } - return true; - } - public bool IsSupersetOf(Set other) { - return other.IsSubsetOf(this); - } - public bool IsProperSupersetOf(Set other) { - return other.IsProperSubsetOf(this); - } - public bool IsDisjointFrom(Set other) { - Dictionary a, b; - if (dict.Count < other.dict.Count) { - a = dict; b = other.dict; - } else { - a = other.dict; b = dict; - } - foreach (T t in a.Keys) { - if (b.ContainsKey(t)) - return false; - } - return true; - } - public bool Contains(T t) { - return dict.ContainsKey(t); - } - public Set Union(Set other) 
{ - if (dict.Count == 0) - return other; - else if (other.dict.Count == 0) - return this; - Dictionary a, b; - if (dict.Count < other.dict.Count) { - a = dict; b = other.dict; - } else { - a = other.dict; b = dict; - } - Dictionary r = new Dictionary(); - foreach (T t in b.Keys) - r[t] = true; - foreach (T t in a.Keys) - r[t] = true; - return new Set(r); - } - public Set Intersect(Set other) { - if (dict.Count == 0) - return this; - else if (other.dict.Count == 0) - return other; - Dictionary a, b; - if (dict.Count < other.dict.Count) { - a = dict; b = other.dict; - } else { - a = other.dict; b = dict; - } - var r = new Dictionary(); - foreach (T t in a.Keys) { - if (b.ContainsKey(t)) - r.Add(t, true); - } - return new Set(r); - } - public Set Difference(Set other) { - if (dict.Count == 0) - return this; - else if (other.dict.Count == 0) - return this; - var r = new Dictionary(); - foreach (T t in dict.Keys) { - if (!other.dict.ContainsKey(t)) - r.Add(t, true); - } - return new Set(r); - } - } - public class MultiSet - { - Dictionary dict; - MultiSet(Dictionary d) { - dict = d; - } - public static MultiSet Empty { - get { - return new MultiSet(new Dictionary(0)); - } - } - public static MultiSet FromElements(params T[] values) { - Dictionary d = new Dictionary(values.Length); - foreach (T t in values) { - var i = 0; - if (!d.TryGetValue(t, out i)) { - i = 0; - } - d[t] = i + 1; - } - return new MultiSet(d); - } - public static MultiSet FromCollection(ICollection values) { - Dictionary d = new Dictionary(); - foreach (T t in values) { - var i = 0; - if (!d.TryGetValue(t, out i)) { - i = 0; - } - d[t] = i + 1; - } - return new MultiSet(d); - } - public static MultiSet FromSeq(Sequence values) { - Dictionary d = new Dictionary(); - foreach (T t in values.Elements) { - var i = 0; - if (!d.TryGetValue(t, out i)) { - i = 0; - } - d[t] = i + 1; - } - return new MultiSet(d); - } - public static MultiSet FromSet(Set values) { - Dictionary d = new Dictionary(); - foreach (T 
t in values.Elements) { - d[t] = 1; - } - return new MultiSet(d); - } - - public bool Equals(MultiSet other) { - return other.IsSubsetOf(this) && this.IsSubsetOf(other); - } - public override bool Equals(object other) { - return other is MultiSet && Equals((MultiSet)other); - } - public override int GetHashCode() { - var hashCode = 1; - foreach (var kv in dict) { - var key = kv.Key.GetHashCode(); - key = (key << 3) | (key >> 29) ^ kv.Value.GetHashCode(); - hashCode = hashCode * (key + 3); - } - return hashCode; - } - public override string ToString() { - var s = "multiset{"; - var sep = ""; - foreach (var kv in dict) { - var t = kv.Key.ToString(); - for (int i = 0; i < kv.Value; i++) { - s += sep + t.ToString(); - sep = ", "; - } - } - return s + "}"; - } - public bool IsProperSubsetOf(MultiSet other) { - return !Equals(other) && IsSubsetOf(other); - } - public bool IsSubsetOf(MultiSet other) { - foreach (T t in dict.Keys) { - if (!other.dict.ContainsKey(t) || other.dict[t] < dict[t]) - return false; - } - return true; - } - public bool IsSupersetOf(MultiSet other) { - return other.IsSubsetOf(this); - } - public bool IsProperSupersetOf(MultiSet other) { - return other.IsProperSubsetOf(this); - } - public bool IsDisjointFrom(MultiSet other) { - foreach (T t in dict.Keys) { - if (other.dict.ContainsKey(t)) - return false; - } - foreach (T t in other.dict.Keys) { - if (dict.ContainsKey(t)) - return false; - } - return true; - } - public bool Contains(T t) { - return dict.ContainsKey(t); - } - public MultiSet Union(MultiSet other) { - if (dict.Count == 0) - return other; - else if (other.dict.Count == 0) - return this; - var r = new Dictionary(); - foreach (T t in dict.Keys) { - var i = 0; - if (!r.TryGetValue(t, out i)) { - i = 0; - } - r[t] = i + dict[t]; - } - foreach (T t in other.dict.Keys) { - var i = 0; - if (!r.TryGetValue(t, out i)) { - i = 0; - } - r[t] = i + other.dict[t]; - } - return new MultiSet(r); - } - public MultiSet Intersect(MultiSet other) { - if 
(dict.Count == 0) - return this; - else if (other.dict.Count == 0) - return other; - var r = new Dictionary(); - foreach (T t in dict.Keys) { - if (other.dict.ContainsKey(t)) { - r.Add(t, other.dict[t] < dict[t] ? other.dict[t] : dict[t]); - } - } - return new MultiSet(r); - } - public MultiSet Difference(MultiSet other) { // \result == this - other - if (dict.Count == 0) - return this; - else if (other.dict.Count == 0) - return this; - var r = new Dictionary(); - foreach (T t in dict.Keys) { - if (!other.dict.ContainsKey(t)) { - r.Add(t, dict[t]); - } else if (other.dict[t] < dict[t]) { - r.Add(t, dict[t] - other.dict[t]); - } - } - return new MultiSet(r); - } - public IEnumerable Elements { - get { - List l = new List(); - foreach (T t in dict.Keys) { - int n; - dict.TryGetValue(t, out n); - for (int i = 0; i < n; i ++) { - l.Add(t); - } - } - return l; - } - } - } - - public class Map - { - Dictionary dict; - Map(Dictionary d) { - dict = d; - } - public static Map Empty { - get { - return new Map(new Dictionary()); - } - } - public static Map FromElements(params Pair[] values) { - Dictionary d = new Dictionary(values.Length); - foreach (Pair p in values) { - d[p.Car] = p.Cdr; - } - return new Map(d); - } - public static Map FromCollection(List> values) { - Dictionary d = new Dictionary(values.Count); - foreach (Pair p in values) { - d[p.Car] = p.Cdr; - } - return new Map(d); - } - public int Length { - get { return dict.Count; } - } - public long LongLength { - get { return dict.Count; } - } - public bool Equals(Map other) { - foreach (U u in dict.Keys) { - V v1, v2; - if (!dict.TryGetValue(u, out v1)) { - return false; // this shouldn't happen - } - if (!other.dict.TryGetValue(u, out v2)) { - return false; // other dictionary does not contain this element - } - if (!v1.Equals(v2)) { - return false; - } - } - foreach (U u in other.dict.Keys) { - if (!dict.ContainsKey(u)) { - return false; // this shouldn't happen - } - } - return true; - } - public override bool 
Equals(object other) { - return other is Map && Equals((Map)other); - } - public override int GetHashCode() { - var hashCode = 1; - foreach (var kv in dict) { - var key = kv.Key.GetHashCode(); - key = (key << 3) | (key >> 29) ^ kv.Value.GetHashCode(); - hashCode = hashCode * (key + 3); - } - return hashCode; - } - public override string ToString() { - var s = "map["; - var sep = ""; - foreach (var kv in dict) { - s += sep + kv.Key.ToString() + " := " + kv.Value.ToString(); - sep = ", "; - } - return s + "]"; - } - public bool IsDisjointFrom(Map other) { - foreach (U u in dict.Keys) { - if (other.dict.ContainsKey(u)) - return false; - } - foreach (U u in other.dict.Keys) { - if (dict.ContainsKey(u)) - return false; - } - return true; - } - public bool Contains(U u) { - return dict.ContainsKey(u); - } - public V Select(U index) { - return dict[index]; - } - public Map Update(U index, V val) { - Dictionary d = new Dictionary(dict); - d[index] = val; - return new Map(d); - } - public IEnumerable Domain { - get { - return dict.Keys; - } - } - } - public class Sequence - { - T[] elmts; - public Sequence(T[] ee) { - elmts = ee; - } - public static Sequence Empty { - get { - return new Sequence(new T[0]); - } - } - public static Sequence FromElements(params T[] values) { - return new Sequence(values); - } - public static Sequence FromString(string s) { - return new Sequence(s.ToCharArray()); - } - public int Length { - get { return elmts.Length; } - } - public long LongLength { - get { return elmts.LongLength; } - } - public T[] Elements { - get { - return elmts; - } - } - public IEnumerable UniqueElements { - get { - var st = Set.FromElements(elmts); - return st.Elements; - } - } - public T Select(ulong index) { - return elmts[index]; - } - public T Select(long index) { - return elmts[index]; - } - public T Select(uint index) { - return elmts[index]; - } - public T Select(int index) { - return elmts[index]; - } - public T Select(BigInteger index) { - return 
elmts[(int)index]; - } - public Sequence Update(long index, T t) { - T[] a = (T[])elmts.Clone(); - a[index] = t; - return new Sequence(a); - } - public Sequence Update(ulong index, T t) { - return Update((long)index, t); - } - public Sequence Update(BigInteger index, T t) { - return Update((long)index, t); - } - public bool Equals(Sequence other) { - int n = elmts.Length; - return n == other.elmts.Length && EqualUntil(other, n); - } - public override bool Equals(object other) { - return other is Sequence && Equals((Sequence)other); - } - public override int GetHashCode() { - if (elmts == null || elmts.Length == 0) - return 0; - var hashCode = 0; - for (var i = 0; i < elmts.Length; i++) { - hashCode = (hashCode << 3) | (hashCode >> 29) ^ elmts[i].GetHashCode(); - } - return hashCode; - } - public override string ToString() { - if (elmts is char[]) { - var s = ""; - foreach (var t in elmts) { - s += t.ToString(); - } - return s; - } else { - var s = "["; - var sep = ""; - foreach (var t in elmts) { - s += sep + t.ToString(); - sep = ", "; - } - return s + "]"; - } - } - bool EqualUntil(Sequence other, int n) { - for (int i = 0; i < n; i++) { - if (!elmts[i].Equals(other.elmts[i])) - return false; - } - return true; - } - public bool IsProperPrefixOf(Sequence other) { - int n = elmts.Length; - return n < other.elmts.Length && EqualUntil(other, n); - } - public bool IsPrefixOf(Sequence other) { - int n = elmts.Length; - return n <= other.elmts.Length && EqualUntil(other, n); - } - public Sequence Concat(Sequence other) { - if (elmts.Length == 0) - return other; - else if (other.elmts.Length == 0) - return this; - T[] a = new T[elmts.Length + other.elmts.Length]; - System.Array.Copy(elmts, 0, a, 0, elmts.Length); - System.Array.Copy(other.elmts, 0, a, elmts.Length, other.elmts.Length); - return new Sequence(a); - } - public bool Contains(T t) { - int n = elmts.Length; - for (int i = 0; i < n; i++) { - if (t.Equals(elmts[i])) - return true; - } - return false; - } - 
public Sequence Take(long m) { - if (elmts.LongLength == m) - return this; - T[] a = new T[m]; - System.Array.Copy(elmts, a, m); - return new Sequence(a); - } - public Sequence Take(ulong n) { - return Take((long)n); - } - public Sequence Take(BigInteger n) { - return Take((long)n); - } - public Sequence Drop(long m) { - if (m == 0) - return this; - T[] a = new T[elmts.Length - m]; - System.Array.Copy(elmts, m, a, 0, elmts.Length - m); - return new Sequence(a); - } - public Sequence Drop(ulong n) { - return Drop((long)n); - } - public Sequence Drop(BigInteger n) { - if (n.IsZero) - return this; - return Drop((long)n); - } - } - public struct Pair - { - public readonly A Car; - public readonly B Cdr; - public Pair(A a, B b) { - this.Car = a; - this.Cdr = b; - } - } - public partial class Helpers { - // Computing forall/exists quantifiers - public static bool QuantBool(bool frall, System.Predicate pred) { - if (frall) { - return pred(false) && pred(true); - } else { - return pred(false) || pred(true); - } - } - public static bool QuantInt(BigInteger lo, BigInteger hi, bool frall, System.Predicate pred) { - for (BigInteger i = lo; i < hi; i++) { - if (pred(i) != frall) { return !frall; } - } - return frall; - } - public static bool QuantSet(Dafny.Set set, bool frall, System.Predicate pred) { - foreach (var u in set.Elements) { - if (pred(u) != frall) { return !frall; } - } - return frall; - } - public static bool QuantMap(Dafny.Map map, bool frall, System.Predicate pred) { - foreach (var u in map.Domain) { - if (pred(u) != frall) { return !frall; } - } - return frall; - } - public static bool QuantSeq(Dafny.Sequence seq, bool frall, System.Predicate pred) { - foreach (var u in seq.Elements) { - if (pred(u) != frall) { return !frall; } - } - return frall; - } - public static bool QuantDatatype(IEnumerable set, bool frall, System.Predicate pred) { - foreach (var u in set) { - if (pred(u) != frall) { return !frall; } - } - return frall; - } - // Enumerating other 
collections - public delegate Dafny.Set ComprehensionDelegate(); - public delegate Dafny.Map MapComprehensionDelegate(); - public static IEnumerable AllBooleans { - get { - yield return false; - yield return true; - } - } - public static IEnumerable AllIntegers { - get { - yield return new BigInteger(0); - for (var j = new BigInteger(1);; j++) { - yield return j; - yield return -j; - } - } - } - // pre: b != 0 - // post: result == a/b, as defined by Euclidean Division (http://en.wikipedia.org/wiki/Modulo_operation) - public static sbyte EuclideanDivision_sbyte(sbyte a, sbyte b) { - return (sbyte)EuclideanDivision_int(a, b); - } - public static short EuclideanDivision_short(short a, short b) { - return (short)EuclideanDivision_int(a, b); - } - public static int EuclideanDivision_int(int a, int b) { - if (0 <= a) { - if (0 <= b) { - // +a +b: a/b - return (int)(((uint)(a)) / ((uint)(b))); - } else { - // +a -b: -(a/(-b)) - return -((int)(((uint)(a)) / ((uint)(unchecked(-b))))); - } - } else { - if (0 <= b) { - // -a +b: -((-a-1)/b) - 1 - return -((int)(((uint)(-(a + 1))) / ((uint)(b)))) - 1; - } else { - // -a -b: ((-a-1)/(-b)) + 1 - return ((int)(((uint)(-(a + 1))) / ((uint)(unchecked(-b))))) + 1; - } - } - } - public static long EuclideanDivision_long(long a, long b) { - if (0 <= a) { - if (0 <= b) { - // +a +b: a/b - return (long)(((ulong)(a)) / ((ulong)(b))); - } else { - // +a -b: -(a/(-b)) - return -((long)(((ulong)(a)) / ((ulong)(unchecked(-b))))); - } - } else { - if (0 <= b) { - // -a +b: -((-a-1)/b) - 1 - return -((long)(((ulong)(-(a + 1))) / ((ulong)(b)))) - 1; - } else { - // -a -b: ((-a-1)/(-b)) + 1 - return ((long)(((ulong)(-(a + 1))) / ((ulong)(unchecked(-b))))) + 1; - } - } - } - public static BigInteger EuclideanDivision(BigInteger a, BigInteger b) { - if (0 <= a.Sign) { - if (0 <= b.Sign) { - // +a +b: a/b - return BigInteger.Divide(a, b); - } else { - // +a -b: -(a/(-b)) - return BigInteger.Negate(BigInteger.Divide(a, BigInteger.Negate(b))); - } - 
} else { - if (0 <= b.Sign) { - // -a +b: -((-a-1)/b) - 1 - return BigInteger.Negate(BigInteger.Divide(BigInteger.Negate(a) - 1, b)) - 1; - } else { - // -a -b: ((-a-1)/(-b)) + 1 - return BigInteger.Divide(BigInteger.Negate(a) - 1, BigInteger.Negate(b)) + 1; - } - } - } - // pre: b != 0 - // post: result == a%b, as defined by Euclidean Division (http://en.wikipedia.org/wiki/Modulo_operation) - public static sbyte EuclideanModulus_sbyte(sbyte a, sbyte b) { - return (sbyte)EuclideanModulus_int(a, b); - } - public static short EuclideanModulus_short(short a, short b) { - return (short)EuclideanModulus_int(a, b); - } - public static int EuclideanModulus_int(int a, int b) { - uint bp = (0 <= b) ? (uint)b : (uint)(unchecked(-b)); - if (0 <= a) { - // +a: a % b' - return (int)(((uint)a) % bp); - } else { - // c = ((-a) % b') - // -a: b' - c if c > 0 - // -a: 0 if c == 0 - uint c = ((uint)(unchecked(-a))) % bp; - return (int)(c == 0 ? c : bp - c); - } - } - public static long EuclideanModulus_long(long a, long b) { - ulong bp = (0 <= b) ? (ulong)b : (ulong)(unchecked(-b)); - if (0 <= a) { - // +a: a % b' - return (long)(((ulong)a) % bp); - } else { - // c = ((-a) % b') - // -a: b' - c if c > 0 - // -a: 0 if c == 0 - ulong c = ((ulong)(unchecked(-a))) % bp; - return (long)(c == 0 ? c : bp - c); - } - } - public static BigInteger EuclideanModulus(BigInteger a, BigInteger b) { - var bp = BigInteger.Abs(b); - if (0 <= a.Sign) { - // +a: a % b' - return BigInteger.Remainder(a, bp); - } else { - // c = ((-a) % b') - // -a: b' - c if c > 0 - // -a: 0 if c == 0 - var c = BigInteger.Remainder(BigInteger.Negate(a), bp); - return c.IsZero ? c : BigInteger.Subtract(bp, c); - } - } - public static Sequence SeqFromArray(T[] array) { - return new Sequence(array); - } - // In .NET version 4.5, it it possible to mark a method with "AggressiveInlining", which says to inline the - // method if possible. 
Method "ExpressionSequence" would be a good candidate for it: - // [System.Runtime.CompilerServices.MethodImpl(System.Runtime.CompilerServices.MethodImplOptions.AggressiveInlining)] - public static U ExpressionSequence(T t, U u) - { - return u; - } - - public static U Let(T t, Func f) { - return f(t); - } - - public delegate Result Function(Input input); - - public static A Id(A a) { - return a; - } - } - - public struct BigRational - { - public static readonly BigRational ZERO = new BigRational(0); - - BigInteger num, den; // invariant 1 <= den - public override string ToString() { - return string.Format("({0}.0 / {1}.0)", num, den); - } - public BigRational(int n) { - num = new BigInteger(n); - den = BigInteger.One; - } - public BigRational(BigInteger n, BigInteger d) { - // requires 1 <= d - num = n; - den = d; - } - public BigInteger ToBigInteger() { - if (0 <= num) { - return num / den; - } else { - return (num - den + 1) / den; - } - } - /// - /// Returns values such that aa/dd == a and bb/dd == b. - /// - private static void Normalize(BigRational a, BigRational b, out BigInteger aa, out BigInteger bb, out BigInteger dd) { - var gcd = BigInteger.GreatestCommonDivisor(a.den, b.den); - var xx = a.den / gcd; - var yy = b.den / gcd; - // We now have a == a.num / (xx * gcd) and b == b.num / (yy * gcd). 
- aa = a.num * yy; - bb = b.num * xx; - dd = a.den * yy; - } - public int CompareTo(BigRational that) { - // simple things first - int asign = this.num.Sign; - int bsign = that.num.Sign; - if (asign < 0 && 0 <= bsign) { - return 1; - } else if (asign <= 0 && 0 < bsign) { - return 1; - } else if (bsign < 0 && 0 <= asign) { - return -1; - } else if (bsign <= 0 && 0 < asign) { - return -1; - } - BigInteger aa, bb, dd; - Normalize(this, that, out aa, out bb, out dd); - return aa.CompareTo(bb); - } - public override int GetHashCode() { - return num.GetHashCode() + 29 * den.GetHashCode(); - } - public override bool Equals(object obj) { - if (obj is BigRational) { - return this == (BigRational)obj; - } else { - return false; - } - } - public static bool operator ==(BigRational a, BigRational b) { - return a.CompareTo(b) == 0; - } - public static bool operator !=(BigRational a, BigRational b) { - return a.CompareTo(b) != 0; - } - public static bool operator >(BigRational a, BigRational b) { - return 0 < a.CompareTo(b); - } - public static bool operator >=(BigRational a, BigRational b) { - return 0 <= a.CompareTo(b); - } - public static bool operator <(BigRational a, BigRational b) { - return a.CompareTo(b) < 0; - } - public static bool operator <=(BigRational a, BigRational b) { - return a.CompareTo(b) <= 0; - } - public static BigRational operator +(BigRational a, BigRational b) { - BigInteger aa, bb, dd; - Normalize(a, b, out aa, out bb, out dd); - return new BigRational(aa + bb, dd); - } - public static BigRational operator -(BigRational a, BigRational b) { - BigInteger aa, bb, dd; - Normalize(a, b, out aa, out bb, out dd); - return new BigRational(aa - bb, dd); - } - public static BigRational operator -(BigRational a) { - return new BigRational(-a.num, a.den); - } - public static BigRational operator *(BigRational a, BigRational b) { - return new BigRational(a.num * b.num, a.den * b.den); - } - public static BigRational operator /(BigRational a, BigRational b) { - // 
Compute the reciprocal of b - BigRational bReciprocal; - if (0 < b.num) { - bReciprocal = new BigRational(b.den, b.num); - } else { - // this is the case b.num < 0 - bReciprocal = new BigRational(-b.den, -b.num); - } - return a * bReciprocal; - } - } -} +using System; // for Func +using System.Numerics; + +namespace Dafny +{ + using System.Collections.Generic; +// set this option if you want to use System.Collections.Immutable and if you know what you're doing. +#if DAFNY_USE_SYSTEM_COLLECTIONS_IMMUTABLE + using System.Collections.Immutable; + using System.Linq; + + public class Set + { + readonly ImmutableHashSet setImpl; + Set(ImmutableHashSet d) { + this.setImpl = d; + } + public static readonly Set Empty = new Set(ImmutableHashSet.Empty); + public static Set FromElements(params T[] values) { + return FromElements((IEnumerable)values); + } + public static Set FromElements(IEnumerable values) { + var d = ImmutableHashSet.Empty.ToBuilder(); + foreach (T t in values) + d.Add(t); + return new Set(d.ToImmutable()); + } + public static Set FromCollection(ICollection values) { + var d = ImmutableHashSet.Empty.ToBuilder(); + foreach (T t in values) + d.Add(t); + return new Set(d.ToImmutable()); + } + public int Length { + get { return this.setImpl.Count; } + } + public long LongLength { + get { return this.setImpl.Count; } + } + public IEnumerable Elements { + get { + return this.setImpl; + } + } + /// + /// This is an inefficient iterator for producing all subsets of "this". Each set returned is the same + /// Set object (but this Set object is fresh; in particular, it is not "this"). + /// + public IEnumerable> AllSubsets { + get { + // Start by putting all set elements into a list + var elmts = new List(); + elmts.AddRange(this.setImpl); + var n = elmts.Count; + var which = new bool[n]; + var s = ImmutableHashSet.Empty.ToBuilder(); + while (true) { + yield return new Set(s.ToImmutable()); + // "add 1" to "which", as if doing a carry chain. 
For every digit changed, change the membership of the corresponding element in "s". + int i = 0; + for (; i < n && which[i]; i++) { + which[i] = false; + s.Remove(elmts[i]); + } + if (i == n) { + // we have cycled through all the subsets + break; + } + which[i] = true; + s.Add(elmts[i]); + } + } + } + public bool Equals(Set other) { + return this.setImpl.SetEquals(other.setImpl); + } + public override bool Equals(object other) { + var otherSet = other as Set; + return otherSet != null && this.Equals(otherSet); + } + public override int GetHashCode() { + var hashCode = 1; + foreach (var t in this.setImpl) { + hashCode = hashCode * (t.GetHashCode()+3); + } + return hashCode; + } + public override string ToString() { + var s = "{"; + var sep = ""; + foreach (var t in this.setImpl) { + s += sep + t.ToString(); + sep = ", "; + } + return s + "}"; + } + public bool IsProperSubsetOf(Set other) { + return IsProperSubsetOf(other); + } + public bool IsSubsetOf(Set other) { + return IsSubsetOf(other); + } + public bool IsSupersetOf(Set other) { + return other.IsSupersetOf(this); + } + public bool IsProperSupersetOf(Set other) { + return other.IsProperSupersetOf(this); + } + public bool IsDisjointFrom(Set other) { + ImmutableHashSet a, b; + if (this.setImpl.Count < other.setImpl.Count) { + a = this.setImpl; b = other.setImpl; + } else { + a = other.setImpl; b = this.setImpl; + } + foreach (T t in a) { + if (b.Contains(t)) + return false; + } + return true; + } + public bool Contains(T t) { + return this.setImpl.Contains(t); + } + public Set Union(Set other) { + return new Set(this.setImpl.Union(other.setImpl)); + } + public Set Intersect(Set other) { + return new Set(this.setImpl.Intersect(other.setImpl)); + } + public Set Difference(Set other) { + return new Set(this.setImpl.Except(other.setImpl)); + } + } + public partial class MultiSet + { + + readonly ImmutableDictionary dict; + MultiSet(ImmutableDictionary d) { + dict = d; + } + public static readonly MultiSet Empty = new 
MultiSet(ImmutableDictionary.Empty); + public static MultiSet FromElements(params T[] values) { + var d = ImmutableDictionary.Empty.ToBuilder(); + foreach (T t in values) { + var i = 0; + if (!d.TryGetValue(t, out i)) { + i = 0; + } + d[t] = i + 1; + } + return new MultiSet(d.ToImmutable()); + } + public static MultiSet FromCollection(ICollection values) { + var d = ImmutableDictionary.Empty.ToBuilder(); + foreach (T t in values) { + var i = 0; + if (!d.TryGetValue(t, out i)) { + i = 0; + } + d[t] = i + 1; + } + return new MultiSet(d.ToImmutable()); + } + public static MultiSet FromSeq(Sequence values) { + var d = ImmutableDictionary.Empty.ToBuilder(); + foreach (T t in values.Elements) { + var i = 0; + if (!d.TryGetValue(t, out i)) { + i = 0; + } + d[t] = i + 1; + } + return new MultiSet(d.ToImmutable()); + } + public static MultiSet FromSet(Set values) { + var d = ImmutableDictionary.Empty.ToBuilder(); + foreach (T t in values.Elements) { + d[t] = 1; + } + return new MultiSet(d.ToImmutable()); + } + + public bool Equals(MultiSet other) { + return other.IsSubsetOf(this) && this.IsSubsetOf(other); + } + public override bool Equals(object other) { + return other is MultiSet && Equals((MultiSet)other); + } + public override int GetHashCode() { + var hashCode = 1; + foreach (var kv in dict) { + var key = kv.Key.GetHashCode(); + key = (key << 3) | (key >> 29) ^ kv.Value.GetHashCode(); + hashCode = hashCode * (key + 3); + } + return hashCode; + } + public override string ToString() { + var s = "multiset{"; + var sep = ""; + foreach (var kv in dict) { + var t = kv.Key.ToString(); + for (int i = 0; i < kv.Value; i++) { + s += sep + t.ToString(); + sep = ", "; + } + } + return s + "}"; + } + public bool IsProperSubsetOf(MultiSet other) { + return !Equals(other) && IsSubsetOf(other); + } + public bool IsSubsetOf(MultiSet other) { + foreach (T t in dict.Keys) { + if (!other.dict.ContainsKey(t) || other.dict[t] < dict[t]) + return false; + } + return true; + } + public bool 
IsSupersetOf(MultiSet other) { + return other.IsSubsetOf(this); + } + public bool IsProperSupersetOf(MultiSet other) { + return other.IsProperSubsetOf(this); + } + public bool IsDisjointFrom(MultiSet other) { + foreach (T t in dict.Keys) { + if (other.dict.ContainsKey(t)) + return false; + } + foreach (T t in other.dict.Keys) { + if (dict.ContainsKey(t)) + return false; + } + return true; + } + public bool Contains(T t) { + return dict.ContainsKey(t); + } + public MultiSet Union(MultiSet other) { + if (dict.Count == 0) + return other; + else if (other.dict.Count == 0) + return this; + var r = ImmutableDictionary.Empty.ToBuilder(); + foreach (T t in dict.Keys) { + var i = 0; + if (!r.TryGetValue(t, out i)) { + i = 0; + } + r[t] = i + dict[t]; + } + foreach (T t in other.dict.Keys) { + var i = 0; + if (!r.TryGetValue(t, out i)) { + i = 0; + } + r[t] = i + other.dict[t]; + } + return new MultiSet(r.ToImmutable()); + } + public MultiSet Intersect(MultiSet other) { + if (dict.Count == 0) + return this; + else if (other.dict.Count == 0) + return other; + var r = ImmutableDictionary.Empty.ToBuilder(); + foreach (T t in dict.Keys) { + if (other.dict.ContainsKey(t)) { + r[t] = other.dict[t] < dict[t] ? 
other.dict[t] : dict[t]; + } + } + return new MultiSet(r.ToImmutable()); + } + public MultiSet Difference(MultiSet other) { // \result == this - other + if (dict.Count == 0) + return this; + else if (other.dict.Count == 0) + return this; + var r = ImmutableDictionary.Empty.ToBuilder(); + foreach (T t in dict.Keys) { + if (!other.dict.ContainsKey(t)) { + r[t] = dict[t]; + } else if (other.dict[t] < dict[t]) { + r[t] = dict[t] - other.dict[t]; + } + } + return new MultiSet(r.ToImmutable()); + } + public IEnumerable Elements { + get { + foreach (T t in dict.Keys) { + int n; + dict.TryGetValue(t, out n); + for (int i = 0; i < n; i ++) { + yield return t; + } + } + } + } + } + + public partial class Map + { + readonly ImmutableDictionary dict; + Map(ImmutableDictionary d) { + dict = d; + } + public static readonly Map Empty = new Map(ImmutableDictionary.Empty); + public static Map FromElements(params Pair[] values) { + var d = ImmutableDictionary.Empty.ToBuilder(); + foreach (Pair p in values) { + d[p.Car] = p.Cdr; + } + return new Map(d.ToImmutable()); + } + public static Map FromCollection(List> values) { + var d = ImmutableDictionary.Empty.ToBuilder(); + foreach (Pair p in values) { + d[p.Car] = p.Cdr; + } + return new Map(d.ToImmutable()); + } + public int Length { + get { return dict.Count; } + } + public long LongLength { + get { return dict.Count; } + } + public bool Equals(Map other) { + foreach (U u in dict.Keys) { + V v1, v2; + if (!dict.TryGetValue(u, out v1)) { + return false; // this shouldn't happen + } + if (!other.dict.TryGetValue(u, out v2)) { + return false; // other dictionary does not contain this element + } + if (!v1.Equals(v2)) { + return false; + } + } + foreach (U u in other.dict.Keys) { + if (!dict.ContainsKey(u)) { + return false; // this shouldn't happen + } + } + return true; + } + public override bool Equals(object other) { + return other is Map && Equals((Map)other); + } + public override int GetHashCode() { + var hashCode = 1; + foreach 
(var kv in dict) { + var key = kv.Key.GetHashCode(); + key = (key << 3) | (key >> 29) ^ kv.Value.GetHashCode(); + hashCode = hashCode * (key + 3); + } + return hashCode; + } + public override string ToString() { + var s = "map["; + var sep = ""; + foreach (var kv in dict) { + s += sep + kv.Key.ToString() + " := " + kv.Value.ToString(); + sep = ", "; + } + return s + "]"; + } + public bool IsDisjointFrom(Map other) { + foreach (U u in dict.Keys) { + if (other.dict.ContainsKey(u)) + return false; + } + foreach (U u in other.dict.Keys) { + if (dict.ContainsKey(u)) + return false; + } + return true; + } + public bool Contains(U u) { + return dict.ContainsKey(u); + } + public V Select(U index) { + return dict[index]; + } + public Map Update(U index, V val) { + return new Map(dict.SetItem(index, val)); + } + public IEnumerable Domain { + get { + return dict.Keys; + } + } + } +#else // !def DAFNY_USE_SYSTEM_COLLECTIONS_IMMUTABLE + public class Set + { + HashSet set; + Set(HashSet s) { + this.set = s; + } + public static Set Empty { + get { + return new Set(new HashSet()); + } + } + public static Set FromElements(params T[] values) { + var s = new HashSet(); + foreach (T t in values) + s.Add(t); + return new Set(s); + } + public static Set FromCollection(ICollection values) { + HashSet s = new HashSet(); + foreach (T t in values) + s.Add(t); + return new Set(s); + } + public int Length { + get { return this.set.Count; } + } + public long LongLength { + get { return this.set.Count; } + } + public IEnumerable Elements { + get { + return this.set; + } + } + /// + /// This is an inefficient iterator for producing all subsets of "this". Each set returned is the same + /// Set object (but this Set object is fresh; in particular, it is not "this"). 
+ /// + public IEnumerable> AllSubsets { + get { + // Start by putting all set elements into a list + var elmts = new List(); + elmts.AddRange(this.set); + var n = elmts.Count; + var which = new bool[n]; + var s = new Set(new HashSet()); + while (true) { + yield return s; + // "add 1" to "which", as if doing a carry chain. For every digit changed, change the membership of the corresponding element in "s". + int i = 0; + for (; i < n && which[i]; i++) { + which[i] = false; + s.set.Remove(elmts[i]); + } + if (i == n) { + // we have cycled through all the subsets + break; + } + which[i] = true; + s.set.Add(elmts[i]); + } + } + } + public bool Equals(Set other) { + return this.set.Count == other.set.Count && IsSubsetOf(other); + } + public override bool Equals(object other) { + return other is Set && Equals((Set)other); + } + public override int GetHashCode() { + var hashCode = 1; + foreach (var t in this.set) { + hashCode = hashCode * (t.GetHashCode()+3); + } + return hashCode; + } + public override string ToString() { + var s = "{"; + var sep = ""; + foreach (var t in this.set) { + s += sep + t.ToString(); + sep = ", "; + } + return s + "}"; + } + public bool IsProperSubsetOf(Set other) { + return this.set.Count < other.set.Count && IsSubsetOf(other); + } + public bool IsSubsetOf(Set other) { + if (other.set.Count < this.set.Count) + return false; + foreach (T t in this.set) { + if (!other.set.Contains(t)) + return false; + } + return true; + } + public bool IsSupersetOf(Set other) { + return other.IsSubsetOf(this); + } + public bool IsProperSupersetOf(Set other) { + return other.IsProperSubsetOf(this); + } + public bool IsDisjointFrom(Set other) { + HashSet a, b; + if (this.set.Count < other.set.Count) { + a = this.set; b = other.set; + } else { + a = other.set; b = this.set; + } + foreach (T t in a) { + if (b.Contains(t)) + return false; + } + return true; + } + public bool Contains(T t) { + return this.set.Contains(t); + } + public Set Union(Set other) { + if 
(this.set.Count == 0) + return other; + else if (other.set.Count == 0) + return this; + HashSet a, b; + if (this.set.Count < other.set.Count) { + a = this.set; b = other.set; + } else { + a = other.set; b = this.set; + } + var r = new HashSet(); + foreach (T t in b) + r.Add(t); + foreach (T t in a) + r.Add(t); + return new Set(r); + } + public Set Intersect(Set other) { + if (this.set.Count == 0) + return this; + else if (other.set.Count == 0) + return other; + HashSet a, b; + if (this.set.Count < other.set.Count) { + a = this.set; b = other.set; + } else { + a = other.set; b = this.set; + } + var r = new HashSet(); + foreach (T t in a) { + if (b.Contains(t)) + r.Add(t); + } + return new Set(r); + } + public Set Difference(Set other) { + if (this.set.Count == 0) + return this; + else if (other.set.Count == 0) + return this; + var r = new HashSet(); + foreach (T t in this.set) { + if (!other.set.Contains(t)) + r.Add(t); + } + return new Set(r); + } + } + public class MultiSet + { + Dictionary dict; + MultiSet(Dictionary d) { + dict = d; + } + public static MultiSet Empty { + get { + return new MultiSet(new Dictionary(0)); + } + } + public static MultiSet FromElements(params T[] values) { + Dictionary d = new Dictionary(values.Length); + foreach (T t in values) { + var i = 0; + if (!d.TryGetValue(t, out i)) { + i = 0; + } + d[t] = i + 1; + } + return new MultiSet(d); + } + public static MultiSet FromCollection(ICollection values) { + Dictionary d = new Dictionary(); + foreach (T t in values) { + var i = 0; + if (!d.TryGetValue(t, out i)) { + i = 0; + } + d[t] = i + 1; + } + return new MultiSet(d); + } + public static MultiSet FromSeq(Sequence values) { + Dictionary d = new Dictionary(); + foreach (T t in values.Elements) { + var i = 0; + if (!d.TryGetValue(t, out i)) { + i = 0; + } + d[t] = i + 1; + } + return new MultiSet(d); + } + public static MultiSet FromSet(Set values) { + Dictionary d = new Dictionary(); + foreach (T t in values.Elements) { + d[t] = 1; + } + 
return new MultiSet(d); + } + + public bool Equals(MultiSet other) { + return other.IsSubsetOf(this) && this.IsSubsetOf(other); + } + public override bool Equals(object other) { + return other is MultiSet && Equals((MultiSet)other); + } + public override int GetHashCode() { + var hashCode = 1; + foreach (var kv in dict) { + var key = kv.Key.GetHashCode(); + key = (key << 3) | (key >> 29) ^ kv.Value.GetHashCode(); + hashCode = hashCode * (key + 3); + } + return hashCode; + } + public override string ToString() { + var s = "multiset{"; + var sep = ""; + foreach (var kv in dict) { + var t = kv.Key.ToString(); + for (int i = 0; i < kv.Value; i++) { + s += sep + t.ToString(); + sep = ", "; + } + } + return s + "}"; + } + public bool IsProperSubsetOf(MultiSet other) { + return !Equals(other) && IsSubsetOf(other); + } + public bool IsSubsetOf(MultiSet other) { + foreach (T t in dict.Keys) { + if (!other.dict.ContainsKey(t) || other.dict[t] < dict[t]) + return false; + } + return true; + } + public bool IsSupersetOf(MultiSet other) { + return other.IsSubsetOf(this); + } + public bool IsProperSupersetOf(MultiSet other) { + return other.IsProperSubsetOf(this); + } + public bool IsDisjointFrom(MultiSet other) { + foreach (T t in dict.Keys) { + if (other.dict.ContainsKey(t)) + return false; + } + foreach (T t in other.dict.Keys) { + if (dict.ContainsKey(t)) + return false; + } + return true; + } + public bool Contains(T t) { + return dict.ContainsKey(t); + } + public MultiSet Union(MultiSet other) { + if (dict.Count == 0) + return other; + else if (other.dict.Count == 0) + return this; + var r = new Dictionary(); + foreach (T t in dict.Keys) { + var i = 0; + if (!r.TryGetValue(t, out i)) { + i = 0; + } + r[t] = i + dict[t]; + } + foreach (T t in other.dict.Keys) { + var i = 0; + if (!r.TryGetValue(t, out i)) { + i = 0; + } + r[t] = i + other.dict[t]; + } + return new MultiSet(r); + } + public MultiSet Intersect(MultiSet other) { + if (dict.Count == 0) + return this; + else if 
(other.dict.Count == 0) + return other; + var r = new Dictionary(); + foreach (T t in dict.Keys) { + if (other.dict.ContainsKey(t)) { + r.Add(t, other.dict[t] < dict[t] ? other.dict[t] : dict[t]); + } + } + return new MultiSet(r); + } + public MultiSet Difference(MultiSet other) { // \result == this - other + if (dict.Count == 0) + return this; + else if (other.dict.Count == 0) + return this; + var r = new Dictionary(); + foreach (T t in dict.Keys) { + if (!other.dict.ContainsKey(t)) { + r.Add(t, dict[t]); + } else if (other.dict[t] < dict[t]) { + r.Add(t, dict[t] - other.dict[t]); + } + } + return new MultiSet(r); + } + public IEnumerable Elements { + get { + List l = new List(); + foreach (T t in dict.Keys) { + int n; + dict.TryGetValue(t, out n); + for (int i = 0; i < n; i ++) { + l.Add(t); + } + } + return l; + } + } + } + + public class Map + { + Dictionary dict; + Map(Dictionary d) { + dict = d; + } + public static Map Empty { + get { + return new Map(new Dictionary()); + } + } + public static Map FromElements(params Pair[] values) { + Dictionary d = new Dictionary(values.Length); + foreach (Pair p in values) { + d[p.Car] = p.Cdr; + } + return new Map(d); + } + public static Map FromCollection(List> values) { + Dictionary d = new Dictionary(values.Count); + foreach (Pair p in values) { + d[p.Car] = p.Cdr; + } + return new Map(d); + } + public int Length { + get { return dict.Count; } + } + public long LongLength { + get { return dict.Count; } + } + public bool Equals(Map other) { + foreach (U u in dict.Keys) { + V v1, v2; + if (!dict.TryGetValue(u, out v1)) { + return false; // this shouldn't happen + } + if (!other.dict.TryGetValue(u, out v2)) { + return false; // other dictionary does not contain this element + } + if (!v1.Equals(v2)) { + return false; + } + } + foreach (U u in other.dict.Keys) { + if (!dict.ContainsKey(u)) { + return false; // this shouldn't happen + } + } + return true; + } + public override bool Equals(object other) { + return other is 
Map && Equals((Map)other); + } + public override int GetHashCode() { + var hashCode = 1; + foreach (var kv in dict) { + var key = kv.Key.GetHashCode(); + key = (key << 3) | (key >> 29) ^ kv.Value.GetHashCode(); + hashCode = hashCode * (key + 3); + } + return hashCode; + } + public override string ToString() { + var s = "map["; + var sep = ""; + foreach (var kv in dict) { + s += sep + kv.Key.ToString() + " := " + kv.Value.ToString(); + sep = ", "; + } + return s + "]"; + } + public bool IsDisjointFrom(Map other) { + foreach (U u in dict.Keys) { + if (other.dict.ContainsKey(u)) + return false; + } + foreach (U u in other.dict.Keys) { + if (dict.ContainsKey(u)) + return false; + } + return true; + } + public bool Contains(U u) { + return dict.ContainsKey(u); + } + public V Select(U index) { + return dict[index]; + } + public Map Update(U index, V val) { + Dictionary d = new Dictionary(dict); + d[index] = val; + return new Map(d); + } + public IEnumerable Domain { + get { + return dict.Keys; + } + } + } +#endif + public class Sequence + { + T[] elmts; + public Sequence(T[] ee) { + elmts = ee; + } + public static Sequence Empty { + get { + return new Sequence(new T[0]); + } + } + public static Sequence FromElements(params T[] values) { + return new Sequence(values); + } + public static Sequence FromString(string s) { + return new Sequence(s.ToCharArray()); + } + public int Length { + get { return elmts.Length; } + } + public long LongLength { + get { return elmts.LongLength; } + } + public T[] Elements { + get { + return elmts; + } + } + public IEnumerable UniqueElements { + get { + var st = Set.FromElements(elmts); + return st.Elements; + } + } + public T Select(ulong index) { + return elmts[index]; + } + public T Select(long index) { + return elmts[index]; + } + public T Select(uint index) { + return elmts[index]; + } + public T Select(int index) { + return elmts[index]; + } + public T Select(BigInteger index) { + return elmts[(int)index]; + } + public Sequence 
Update(long index, T t) { + T[] a = (T[])elmts.Clone(); + a[index] = t; + return new Sequence(a); + } + public Sequence Update(ulong index, T t) { + return Update((long)index, t); + } + public Sequence Update(BigInteger index, T t) { + return Update((long)index, t); + } + public bool Equals(Sequence other) { + int n = elmts.Length; + return n == other.elmts.Length && EqualUntil(other, n); + } + public override bool Equals(object other) { + return other is Sequence && Equals((Sequence)other); + } + public override int GetHashCode() { + if (elmts == null || elmts.Length == 0) + return 0; + var hashCode = 0; + for (var i = 0; i < elmts.Length; i++) { + hashCode = (hashCode << 3) | (hashCode >> 29) ^ elmts[i].GetHashCode(); + } + return hashCode; + } + public override string ToString() { + if (elmts is char[]) { + var s = ""; + foreach (var t in elmts) { + s += t.ToString(); + } + return s; + } else { + var s = "["; + var sep = ""; + foreach (var t in elmts) { + s += sep + t.ToString(); + sep = ", "; + } + return s + "]"; + } + } + bool EqualUntil(Sequence other, int n) { + for (int i = 0; i < n; i++) { + if (!elmts[i].Equals(other.elmts[i])) + return false; + } + return true; + } + public bool IsProperPrefixOf(Sequence other) { + int n = elmts.Length; + return n < other.elmts.Length && EqualUntil(other, n); + } + public bool IsPrefixOf(Sequence other) { + int n = elmts.Length; + return n <= other.elmts.Length && EqualUntil(other, n); + } + public Sequence Concat(Sequence other) { + if (elmts.Length == 0) + return other; + else if (other.elmts.Length == 0) + return this; + T[] a = new T[elmts.Length + other.elmts.Length]; + System.Array.Copy(elmts, 0, a, 0, elmts.Length); + System.Array.Copy(other.elmts, 0, a, elmts.Length, other.elmts.Length); + return new Sequence(a); + } + public bool Contains(T t) { + int n = elmts.Length; + for (int i = 0; i < n; i++) { + if (t.Equals(elmts[i])) + return true; + } + return false; + } + public Sequence Take(long m) { + if 
(elmts.LongLength == m) + return this; + T[] a = new T[m]; + System.Array.Copy(elmts, a, m); + return new Sequence(a); + } + public Sequence Take(ulong n) { + return Take((long)n); + } + public Sequence Take(BigInteger n) { + return Take((long)n); + } + public Sequence Drop(long m) { + if (m == 0) + return this; + T[] a = new T[elmts.Length - m]; + System.Array.Copy(elmts, m, a, 0, elmts.Length - m); + return new Sequence(a); + } + public Sequence Drop(ulong n) { + return Drop((long)n); + } + public Sequence Drop(BigInteger n) { + if (n.IsZero) + return this; + return Drop((long)n); + } + } + public struct Pair + { + public readonly A Car; + public readonly B Cdr; + public Pair(A a, B b) { + this.Car = a; + this.Cdr = b; + } + } + public partial class Helpers { + // Computing forall/exists quantifiers + public static bool QuantBool(bool frall, System.Predicate pred) { + if (frall) { + return pred(false) && pred(true); + } else { + return pred(false) || pred(true); + } + } + public static bool QuantChar(bool frall, System.Predicate pred) { + for (int i = 0; i < 0x10000; i++) { + if (pred((char)i) != frall) { return !frall; } + } + return frall; + } + public static bool QuantInt(BigInteger lo, BigInteger hi, bool frall, System.Predicate pred) { + for (BigInteger i = lo; i < hi; i++) { + if (pred(i) != frall) { return !frall; } + } + return frall; + } + public static bool QuantSet(Dafny.Set set, bool frall, System.Predicate pred) { + foreach (var u in set.Elements) { + if (pred(u) != frall) { return !frall; } + } + return frall; + } + public static bool QuantMap(Dafny.Map map, bool frall, System.Predicate pred) { + foreach (var u in map.Domain) { + if (pred(u) != frall) { return !frall; } + } + return frall; + } + public static bool QuantSeq(Dafny.Sequence seq, bool frall, System.Predicate pred) { + foreach (var u in seq.Elements) { + if (pred(u) != frall) { return !frall; } + } + return frall; + } + public static bool QuantDatatype(IEnumerable set, bool frall, 
System.Predicate pred) { + foreach (var u in set) { + if (pred(u) != frall) { return !frall; } + } + return frall; + } + // Enumerating other collections + public delegate Dafny.Set ComprehensionDelegate(); + public delegate Dafny.Map MapComprehensionDelegate(); + public static IEnumerable AllBooleans { + get { + yield return false; + yield return true; + } + } + public static IEnumerable AllChars { + get { + for (int i = 0; i < 0x10000; i++) { + yield return (char)i; + } + } + } + public static IEnumerable AllIntegers { + get { + yield return new BigInteger(0); + for (var j = new BigInteger(1);; j++) { + yield return j; + yield return -j; + } + } + } + public static IEnumerable IntegerRange(BigInteger lo, BigInteger hi) { + for (var j = lo; j < hi; j++) { + yield return j; + } + } + // pre: b != 0 + // post: result == a/b, as defined by Euclidean Division (http://en.wikipedia.org/wiki/Modulo_operation) + public static sbyte EuclideanDivision_sbyte(sbyte a, sbyte b) { + return (sbyte)EuclideanDivision_int(a, b); + } + public static short EuclideanDivision_short(short a, short b) { + return (short)EuclideanDivision_int(a, b); + } + public static int EuclideanDivision_int(int a, int b) { + if (0 <= a) { + if (0 <= b) { + // +a +b: a/b + return (int)(((uint)(a)) / ((uint)(b))); + } else { + // +a -b: -(a/(-b)) + return -((int)(((uint)(a)) / ((uint)(unchecked(-b))))); + } + } else { + if (0 <= b) { + // -a +b: -((-a-1)/b) - 1 + return -((int)(((uint)(-(a + 1))) / ((uint)(b)))) - 1; + } else { + // -a -b: ((-a-1)/(-b)) + 1 + return ((int)(((uint)(-(a + 1))) / ((uint)(unchecked(-b))))) + 1; + } + } + } + public static long EuclideanDivision_long(long a, long b) { + if (0 <= a) { + if (0 <= b) { + // +a +b: a/b + return (long)(((ulong)(a)) / ((ulong)(b))); + } else { + // +a -b: -(a/(-b)) + return -((long)(((ulong)(a)) / ((ulong)(unchecked(-b))))); + } + } else { + if (0 <= b) { + // -a +b: -((-a-1)/b) - 1 + return -((long)(((ulong)(-(a + 1))) / ((ulong)(b)))) - 1; + } 
else { + // -a -b: ((-a-1)/(-b)) + 1 + return ((long)(((ulong)(-(a + 1))) / ((ulong)(unchecked(-b))))) + 1; + } + } + } + public static BigInteger EuclideanDivision(BigInteger a, BigInteger b) { + if (0 <= a.Sign) { + if (0 <= b.Sign) { + // +a +b: a/b + return BigInteger.Divide(a, b); + } else { + // +a -b: -(a/(-b)) + return BigInteger.Negate(BigInteger.Divide(a, BigInteger.Negate(b))); + } + } else { + if (0 <= b.Sign) { + // -a +b: -((-a-1)/b) - 1 + return BigInteger.Negate(BigInteger.Divide(BigInteger.Negate(a) - 1, b)) - 1; + } else { + // -a -b: ((-a-1)/(-b)) + 1 + return BigInteger.Divide(BigInteger.Negate(a) - 1, BigInteger.Negate(b)) + 1; + } + } + } + // pre: b != 0 + // post: result == a%b, as defined by Euclidean Division (http://en.wikipedia.org/wiki/Modulo_operation) + public static sbyte EuclideanModulus_sbyte(sbyte a, sbyte b) { + return (sbyte)EuclideanModulus_int(a, b); + } + public static short EuclideanModulus_short(short a, short b) { + return (short)EuclideanModulus_int(a, b); + } + public static int EuclideanModulus_int(int a, int b) { + uint bp = (0 <= b) ? (uint)b : (uint)(unchecked(-b)); + if (0 <= a) { + // +a: a % b' + return (int)(((uint)a) % bp); + } else { + // c = ((-a) % b') + // -a: b' - c if c > 0 + // -a: 0 if c == 0 + uint c = ((uint)(unchecked(-a))) % bp; + return (int)(c == 0 ? c : bp - c); + } + } + public static long EuclideanModulus_long(long a, long b) { + ulong bp = (0 <= b) ? (ulong)b : (ulong)(unchecked(-b)); + if (0 <= a) { + // +a: a % b' + return (long)(((ulong)a) % bp); + } else { + // c = ((-a) % b') + // -a: b' - c if c > 0 + // -a: 0 if c == 0 + ulong c = ((ulong)(unchecked(-a))) % bp; + return (long)(c == 0 ? 
c : bp - c); + } + } + public static BigInteger EuclideanModulus(BigInteger a, BigInteger b) { + var bp = BigInteger.Abs(b); + if (0 <= a.Sign) { + // +a: a % b' + return BigInteger.Remainder(a, bp); + } else { + // c = ((-a) % b') + // -a: b' - c if c > 0 + // -a: 0 if c == 0 + var c = BigInteger.Remainder(BigInteger.Negate(a), bp); + return c.IsZero ? c : BigInteger.Subtract(bp, c); + } + } + public static Sequence SeqFromArray(T[] array) { + return new Sequence((T[])array.Clone()); + } + // In .NET version 4.5, it it possible to mark a method with "AggressiveInlining", which says to inline the + // method if possible. Method "ExpressionSequence" would be a good candidate for it: + // [System.Runtime.CompilerServices.MethodImpl(System.Runtime.CompilerServices.MethodImplOptions.AggressiveInlining)] + public static U ExpressionSequence(T t, U u) + { + return u; + } + + public static U Let(T t, Func f) { + return f(t); + } + + public delegate Result Function(Input input); + + public static A Id(A a) { + return a; + } + } + + public struct BigRational + { + public static readonly BigRational ZERO = new BigRational(0); + + BigInteger num, den; // invariant 1 <= den + public override string ToString() { + return string.Format("({0}.0 / {1}.0)", num, den); + } + public BigRational(int n) { + num = new BigInteger(n); + den = BigInteger.One; + } + public BigRational(BigInteger n, BigInteger d) { + // requires 1 <= d + num = n; + den = d; + } + public BigInteger ToBigInteger() { + if (0 <= num) { + return num / den; + } else { + return (num - den + 1) / den; + } + } + /// + /// Returns values such that aa/dd == a and bb/dd == b. + /// + private static void Normalize(BigRational a, BigRational b, out BigInteger aa, out BigInteger bb, out BigInteger dd) { + var gcd = BigInteger.GreatestCommonDivisor(a.den, b.den); + var xx = a.den / gcd; + var yy = b.den / gcd; + // We now have a == a.num / (xx * gcd) and b == b.num / (yy * gcd). 
+ aa = a.num * yy; + bb = b.num * xx; + dd = a.den * yy; + } + public int CompareTo(BigRational that) { + // simple things first + int asign = this.num.Sign; + int bsign = that.num.Sign; + if (asign < 0 && 0 <= bsign) { + return -1; + } else if (asign <= 0 && 0 < bsign) { + return -1; + } else if (bsign < 0 && 0 <= asign) { + return 1; + } else if (bsign <= 0 && 0 < asign) { + return 1; + } + BigInteger aa, bb, dd; + Normalize(this, that, out aa, out bb, out dd); + return aa.CompareTo(bb); + } + public override int GetHashCode() { + return num.GetHashCode() + 29 * den.GetHashCode(); + } + public override bool Equals(object obj) { + if (obj is BigRational) { + return this == (BigRational)obj; + } else { + return false; + } + } + public static bool operator ==(BigRational a, BigRational b) { + return a.CompareTo(b) == 0; + } + public static bool operator !=(BigRational a, BigRational b) { + return a.CompareTo(b) != 0; + } + public static bool operator >(BigRational a, BigRational b) { + return 0 < a.CompareTo(b); + } + public static bool operator >=(BigRational a, BigRational b) { + return 0 <= a.CompareTo(b); + } + public static bool operator <(BigRational a, BigRational b) { + return a.CompareTo(b) < 0; + } + public static bool operator <=(BigRational a, BigRational b) { + return a.CompareTo(b) <= 0; + } + public static BigRational operator +(BigRational a, BigRational b) { + BigInteger aa, bb, dd; + Normalize(a, b, out aa, out bb, out dd); + return new BigRational(aa + bb, dd); + } + public static BigRational operator -(BigRational a, BigRational b) { + BigInteger aa, bb, dd; + Normalize(a, b, out aa, out bb, out dd); + return new BigRational(aa - bb, dd); + } + public static BigRational operator -(BigRational a) { + return new BigRational(-a.num, a.den); + } + public static BigRational operator *(BigRational a, BigRational b) { + return new BigRational(a.num * b.num, a.den * b.den); + } + public static BigRational operator /(BigRational a, BigRational b) { + // 
Compute the reciprocal of b + BigRational bReciprocal; + if (0 < b.num) { + bReciprocal = new BigRational(b.den, b.num); + } else { + // this is the case b.num < 0 + bReciprocal = new BigRational(-b.den, -b.num); + } + return a * bReciprocal; + } + } +} diff -Nru dafny-1.9.5/Binaries/dafny-server dafny-1.9.7/Binaries/dafny-server --- dafny-1.9.5/Binaries/dafny-server 1970-01-01 00:00:00.000000000 +0000 +++ dafny-1.9.7/Binaries/dafny-server 2016-06-05 21:11:14.000000000 +0000 @@ -0,0 +1,16 @@ +#!/usr/bin/env bash + +MONO=$(which mono) +DAFNYSERVER=$(dirname "${BASH_SOURCE[0]}")/DafnyServer.exe + +if [[ ! -x "$MONO" ]]; then + echo "Error: Dafny requires Mono to run on non-Windows systems." + exit 1 +fi + +if [[ ! -x "$DAFNYSERVER" ]]; then + echo "Error: DafnyServer.exe not found at $DAFNYSERVER." + exit 1 +fi + +"$MONO" "$DAFNYSERVER" "$@" Binary files /tmp/tmpmYgONk/eNTA8SFwit/dafny-1.9.5/Binaries/System.Collections.Immutable.dll and /tmp/tmpmYgONk/LnaTbJX_hb/dafny-1.9.7/Binaries/System.Collections.Immutable.dll differ Binary files /tmp/tmpmYgONk/eNTA8SFwit/dafny-1.9.5/Binaries/z3.exe and /tmp/tmpmYgONk/LnaTbJX_hb/dafny-1.9.7/Binaries/z3.exe differ diff -Nru dafny-1.9.5/debian/bin/dafny dafny-1.9.7/debian/bin/dafny --- dafny-1.9.5/debian/bin/dafny 2015-08-14 19:46:19.000000000 +0000 +++ dafny-1.9.7/debian/bin/dafny 2016-06-05 21:09:29.000000000 +0000 @@ -1,2 +1,2 @@ #!/bin/sh -exec /usr/bin/cli /usr/lib/dafny/Dafny.exe /z3exe:/usr/bin/z3 "$@" +exec /usr/bin/cli /usr/lib/dafny/Dafny.exe "$@" diff -Nru dafny-1.9.5/debian/changelog dafny-1.9.7/debian/changelog --- dafny-1.9.5/debian/changelog 2016-04-06 20:48:42.000000000 +0000 +++ dafny-1.9.7/debian/changelog 2016-06-05 21:10:39.000000000 +0000 @@ -1,3 +1,9 @@ +dafny (1.9.7-1) unstable; urgency=medium + + * New upstream release. + + -- Benjamin Barenblat Sun, 05 Jun 2016 17:10:37 -0400 + dafny (1.9.5-1) unstable; urgency=medium * Initial release. 
diff -Nru dafny-1.9.5/debian/control dafny-1.9.7/debian/control --- dafny-1.9.5/debian/control 2016-04-06 20:48:28.000000000 +0000 +++ dafny-1.9.7/debian/control 2016-06-05 18:49:02.000000000 +0000 @@ -9,8 +9,8 @@ libboogie-cil, mono-devel (>= 2.4.2.3), mono-reference-assemblies-4.0 -Standards-Version: 3.9.7 -Homepage: http://research.microsoft.com/en-us/projects/dafny/ +Standards-Version: 3.9.8 +Homepage: https://research.microsoft.com/en-us/projects/dafny/ Package: dafny Architecture: all diff -Nru dafny-1.9.5/debian/copyright dafny-1.9.7/debian/copyright --- dafny-1.9.5/debian/copyright 2015-08-14 20:13:11.000000000 +0000 +++ dafny-1.9.7/debian/copyright 2016-06-05 18:59:00.000000000 +0000 @@ -1,19 +1,23 @@ Format: http://www.debian.org/doc/packaging-manuals/copyright-format/1.0/ Upstream-Name: Dafny Source: https://dafny.codeplex.com/SourceControl/latest -Copyright: 2003-2015 Microsoft +Copyright: 2003-2016 Microsoft License: Ms-PL Files: * -Copyright: 2009-2015 Microsoft Corporation +Copyright: 2009-2016 Microsoft Corporation License: Ms-PL Files: Binaries/z3.exe Copyright: 2006-2014 Microsoft Corporation License: Expat +Files: Docs/DafnyRef/css.sty +Copyright: 2013 Microsoft Corporation +License: Apache-2.0 + Files: debian/* -Copyright: 2015 Benjamin Barenblat +Copyright: 2015, 2016 Benjamin Barenblat License: Apache-2.0 License: Apache-2.0 diff -Nru dafny-1.9.5/debian/gbp.conf dafny-1.9.7/debian/gbp.conf --- dafny-1.9.5/debian/gbp.conf 2015-08-30 20:36:51.000000000 +0000 +++ dafny-1.9.7/debian/gbp.conf 2016-06-05 20:12:00.000000000 +0000 @@ -1,2 +1,2 @@ [DEFAULT] -upstream-tree = 43361334dd4f9be3efe0cf7cdf984c140cd74b14 +upstream-tree = df5c5f547990c1f80ab7594a1f9287ee03a61754 diff -Nru dafny-1.9.5/debian/patches/series dafny-1.9.7/debian/patches/series --- dafny-1.9.5/debian/patches/series 1970-01-01 00:00:00.000000000 +0000 +++ dafny-1.9.7/debian/patches/series 2016-06-05 20:56:39.000000000 +0000 @@ -0,0 +1 @@ +use_system_z3.diff diff -Nru 
dafny-1.9.5/debian/patches/use_system_z3.diff dafny-1.9.7/debian/patches/use_system_z3.diff --- dafny-1.9.5/debian/patches/use_system_z3.diff 1970-01-01 00:00:00.000000000 +0000 +++ dafny-1.9.7/debian/patches/use_system_z3.diff 2016-06-05 21:02:33.000000000 +0000 @@ -0,0 +1,38 @@ +Description: Use system Z3 + Dafny by default looks for its vendored Z3. Modify it to look for the system + Z3 installation instead. +Forwarded: not-needed +Author: Benjamin Barenblat +--- a/Source/Dafny/DafnyOptions.cs ++++ b/Source/Dafny/DafnyOptions.cs +@@ -283,29 +283,7 @@ namespace Microsoft.Dafny + /// so we vendor a Windows version. + /// + private void SetZ3ExecutableName() { +- var platform = (int)System.Environment.OSVersion.Platform; +- +- // http://www.mono-project.com/docs/faq/technical/ +- var isUnix = platform == 4 || platform == 128; +- +- var z3binName = isUnix ? "z3" : "z3.exe"; +- var dafnyBinDir = System.IO.Path.GetDirectoryName(System.Reflection.Assembly.GetExecutingAssembly().Location); +- var z3BinDir = System.IO.Path.Combine(dafnyBinDir, "z3", "bin"); +- var z3BinPath = System.IO.Path.Combine(z3BinDir, z3binName); +- +- if (!System.IO.File.Exists(z3BinPath) && !isUnix) { +- // This is most likely a Windows user running from source without downloading z3 +- // separately; this is ok, since we vendor z3.exe. 
+- z3BinPath = System.IO.Path.Combine(dafnyBinDir, z3binName); +- } +- +- if (!System.IO.File.Exists(z3BinPath) && errorReporter != null) { +- var tok = new Bpl.Token(1, 1) { filename = "*** " }; +- errorReporter.Warning(MessageSource.Other, tok, "Could not find '{0}' in '{1}'.{2}Downloading and extracting a Z3 distribution to Dafny's 'Binaries' folder would solve this issue; for now, we'll rely on Boogie to find Z3.", +- z3binName, z3BinDir, System.Environment.NewLine); +- } else { +- Z3ExecutablePath = z3BinPath; +- } ++ Z3ExecutablePath = "/usr/bin/z3"; + } + + public override void Usage() { diff -Nru dafny-1.9.5/Docs/DafnyRef/css.sty dafny-1.9.7/Docs/DafnyRef/css.sty --- dafny-1.9.5/Docs/DafnyRef/css.sty 1970-01-01 00:00:00.000000000 +0000 +++ dafny-1.9.7/Docs/DafnyRef/css.sty 2016-06-05 21:11:14.000000000 +0000 @@ -0,0 +1,803 @@ +%--------------------------------------------------------------------------- +% Copyright 2013 Microsoft Corporation. +% +% This is free software; you can redistribute it and/or modify it under the +% terms of the Apache License, Version 2.0. A copy of the License can be +% found in the file "license.txt" at the root of this distribution. 
+%--------------------------------------------------------------------------- +\NeedsTeXFormat{LaTeX2e}[1995/12/01] + +\RequirePackage{iftex} +\RequirePackage{etoolbox} +\RequirePackage{xkeyval} +\RequirePackage[table]{xcolor} +\RequirePackage{mdframed} +\RequirePackage{graphicx} +\RequirePackage{tablefootnote} + +% font selection +\ifXeTeX\RequirePackage{fontspec}\else +\ifLuaTeX\RequirePackage{fontspec}\else +\providecommand{\fontspec}[2][]{} +\fi\fi + + +% Define CSS 17 standard colors +\definecolor{Red}{HTML}{FF0000} +\definecolor{Lime}{HTML}{00FF00} +\definecolor{Blue}{HTML}{0000FF} + +\definecolor{Yellow}{HTML}{FFFF00} +\definecolor{Cyan}{HTML}{00FFFF} +\definecolor{Magenta}{HTML}{FF00FF} + +\definecolor{Navy}{HTML}{000080} +\definecolor{Maroon}{HTML}{800000} +\definecolor{Green}{HTML}{008000} + +\definecolor{Teal}{HTML}{008080} +\definecolor{Purple}{HTML}{800080} +\definecolor{Olive}{HTML}{808000} + +\definecolor{Black}{HTML}{000000} +\definecolor{Dimgray}{HTML}{696969} +\definecolor{Gray}{HTML}{808080} +\definecolor{Darkgray}{HTML}{A9A9A9} +\definecolor{Silver}{HTML}{C0C0C0} +\definecolor{Lightgray}{HTML}{D3D3D3} +\definecolor{Gainsboro}{HTML}{DCDCDC} +\definecolor{Floralwhite}{HTML}{FFFAF0} +\definecolor{Ivory}{HTML}{FFFFF0} +\definecolor{White}{HTML}{FFFFFF} + +\definecolor{Orange}{HTML}{FFA500} +\definecolor{Aqua}{HTML}{00FFFF} +\definecolor{Fuchsia}{HTML}{FF00FF} + +\newcommand{\@swap}[2]{#2{#1}} +\newcommand{\@expandafter}[2]{\expandafter\@swap\expandafter{#2}{#1}} + +\newcommand{\eifstrequal}{\expandafter\ifstrequal\expandafter} +\newcommand{\eeifstrequal}[2]{\@expandafter{\eifstrequal{#1}}{#2}} + +\providecommand\providelength[1]{% + \begingroup + \escapechar\m@ne + \xdef\@gtempa{\string#1}% + \endgroup + \@ifundefined{\@gtempa}% + {\newskip#1}% + {}% +} + +% is a string an element of a list of (comma separated) strings +\newcommand{\eifstrelement}[4]{% + \def\@found{}% + \@for\@ii:=#2\do{% + \eeifstrequal{\@ii}{#1}{\def\@found{true}}{}% + }% + 
\ifdefvoid{\@found}{#4}{#3}% +} + +% do two lists of strings intersect? +\newcommand{\ifintersect}[4]{% + \def\@intersect{}% + \@for\@sname:=#1\do{% + \ifdefvoid{\@intersect}{% + \eifstrelement{\@sname}{#2}{\def\@intersect{true}}{}% + }{}% + }% + \ifdefvoid{\@intersect}{#4}{#3}% +} + +% get string head and tail +\def\strsplit#1{\expandafter\strsplitx#1\empty\empty\empty} +\def\strsplitx#1#2\empty{% + \edef\strhead{#1}% + \edef\strtail{#2}% +} + +% normalize colors: to lowercase and then capitalize +\newcommand{\cssDefNormalizeColor}[2]{% + \expandafter\@cssNormColor#2\empty{#1}\empty% +} +\def\@cssNormColor#1#2\empty#3\empty{% + \uppercase{\def\@hd{#1}}\lowercase{\def\@tl{#2}}% + \expandafter\global\expandafter\edef\csname #3\endcsname{\@hd\@tl}% +} + + +% --------------------------------------------------- +% Some TeX stuff to compose functions +% --------------------------------------------------- +\newcommand{\apptox}[2]{% apptox{\cmd1}{\cmd2} == newcommand{\cmd1'}[1]{\cmd1{\cmd2{#1}}} + \providecommand{#1}[1]{##1}% define it if necessary (as identity) + \protected@edef#1##1{#1{\protect #2{##1}}}% +} + +\newcommand{\pretox}[2]{% pretox{\cmd1}{\cmd2} == newcommand{\cmd1'}[1]{\cmd2{\cmd1{#1}}} + \providecommand{#1}[1]{##1}% + \protected@edef#1##1{\protect #2{#1{##1}}}% +} + +%------------------------------------------------------------- +% Save footnotes inside mdframed and minipage environments +%------------------------------------------------------------- +\newif\if@saveFootnotes +\newcommand{\cssSaveFootnotes}% + {\if@saveFootnotes\else% + \let\footnote\tablefootnote% + \fi% + \@saveFootnotestrue}% +\newcommand{\cssRestoreFootnotes}% + {\if@saveFootnotes\else% + \tfn@tablefootnoteprintout% + \gdef\tfn@fnt{0}% + \fi}% + +%------------------------------------------------------------- +% Setup mdframed with default values +%------------------------------------------------------------- +\newlength{\cssPixel}\setlength{\cssPixel}{0.4pt}% assume 180 dpi +\mdfsetup{% 
+ leftmargin=0pt,% + rightmargin=0pt,% + skipabove=0pt,% + skipbelow=0pt,% + innertopmargin=0pt,% + innerbottommargin=0pt,% + innerleftmargin=0pt,% + innerrightmargin=0pt,% + middlelinewidth=0pt,% + linewidth=0pt,% + outerlinewidth=0pt,innerlinewidth=0pt% +} + + +% --------------------------------------------------- +% Basic command to process attributes passed to TeX +% --------------------------------------------------- +\newif\if@usewrap +\newcommand{\@doBefore}{} +\newcommand{\@doAfter}{} +\newcommand{\@wrapCmd}[1]{#1} + +\newcommand{\@cssUseCmd}{\renewcommand{\@wrapCmd}[1]{##1}\@usewraptrue} +\newcommand{\@cssUseEnv}{\renewcommand{\@doBefore}{}\renewcommand{\@doAfter}{}\@usewrapfalse} + +\newcommand{\@cssApplyCmd}[1]{{\@wrapCmd{#1}}} +\newcommand{\@cssApplyBefore}{\@doBefore{}} +\newcommand{\@cssApplyAfter}{\@doAfter{}} + +\newcommand{\@cssProcessAttrs}[2]{% + \setkeys*{cssx}{#1}\setrmkeys*{csspre}\setrmkeys*{css}\setrmkeys*{csspost}% defaults + \@cssApplyRulesFor{parentclass}{css}{\cssParentClass}% + \setkeys*{cssx}{#2}\setrmkeys*{csspre}\setrmkeys*{css}\setrmkeys*{csspost}% regular + \protected@edef\cssParentClass{\cssClass}% +} + + +\newcommand{\@cmdBefore}[2]{#1#2} +\newcommand{\@cmdAfter}[2]{#2#1} + +\newcommand{\cssWrapCmd}[1]{\apptox{\@wrapCmd}{#1}} +\newcommand{\cssDoBefore}[1]{\if@usewrap\cssWrapCmd{\@cmdBefore{#1}}\else #1\fi} +\newcommand{\cssDoAfter}[1]{\if@usewrap\cssWrapCmd{\@cmdAfter{#1}}\else\preto\@doAfter{#1}\fi} + +\newcommand{\cssDoEnv}[1]{\cssDoBefore{\protect\begin{#1}}\cssDoAfter{\protect\end{#1}}} +\newcommand{\cssDoEnvOpt}[2]{\cssDoBefore{\begin{#1}[#2]}\cssDoAfter{\end{#1}}} +\newcommand{\cssDoEnvArg}[2]{\cssDoBefore{\begin{#1}{#2}}\cssDoAfter{\end{#1}}} +\newcommand{\cssDoEnvArgII}[3]{\cssDoBefore{\begin{#1}{#2}{#3}}\cssDoAfter{\end{#1}}} + +\newcommand{\newKey}[4][]{\define@key{#2}{#3}[#1]{#4}} +\newcommand{\newLength}[2]{\providelength{#1}\setlength{#1}{#2}} + + +\newcommand{\@cssReset}{} 
+\newcommand{\cssAddReset}[1]{\appto{\@cssReset}{#1}} +\newcommand{\cssNewResetCommand}[2]{\newcommand{#1}{#2}\cssAddReset{\renewcommand{#1}{#2}}} + +\newlength{\cssFill} +\setlength{\cssFill}{2sp plus 1fill minus 2sp} % make \fill unequal to 0pt, and detectable as 2sp + +\newcommand{\cssNewLengthKey}[4][0pt]{% + \newLength{#4}{#1}% + \newKey{#2}{#3}{% + \ifstrequal{##1}{auto}{\setlength{#4}{\cssFill}}{\setlength{#4}{##1}}% + }% + \cssAddReset{\setlength{#4}{#1}}% +} + + +\newcommand{\cssNewKeyNoReset}[4]{% + \newcommand{#3}{#4}% + \newKey{#1}{#2}{\renewcommand{#3}{##1}}% +} + +\newcommand{\cssNewKey}[4]{% + \cssNewResetCommand{#3}{#4}% + \newKey{#1}{#2}{%(#2=##1)%debug key setting + \renewcommand{#3}{##1}}% +} +\newcommand{\cssNewKeyX}[5]{% + \cssNewResetCommand{#3}{#4}% + \newKey{#1}{#2}{\renewcommand{#3}{##1}#5{##1}}% +} +\newcommand{\cssNewListKey}[4]{% + \cssNewResetCommand{#3}{#4}% + \newKey{#1}{#2}{\appto{#3}{,##1}}% +} +\newcommand{\cssNewListKeyX}[5]{% + \cssNewResetCommand{#3}{#4}% + \newKey{#1}{#2}{\appto{#3}{,##1}#5{##1}}% +} +\newcommand{\cssNewPseudoKey}[3]{% + \newKey{#1}{#2}{\setkeys{#1}{#3}}% +} + +%------------------------------------------------------------- +% css: display +%------------------------------------------------------------- +\cssNewKey{css}{display}{\cssDisplay}{block} + +%------------------------------------------------------------- +% css: width, height, and margins +%------------------------------------------------------------- + +\cssNewLengthKey{css}{margin-left}{\cssMarginLeft} +\cssNewLengthKey{css}{margin-right}{\cssMarginRight} +\cssNewLengthKey{css}{margin-top}{\cssMarginTop} +\cssNewLengthKey{css}{margin-bottom}{\cssMarginBottom} +\cssNewLengthKey[1sp]{css}{width}{\cssWidth} +\cssNewLengthKey[1sp]{css}{height}{\cssHeight} +\cssNewKey{css}{vertical-align}{\cssVerticalAlign}{} + +\cssNewPseudoKey{css}{margin}{margin-top=#1,margin-bottom=#1,margin-left=#1,margin-right=#1} + + +\newcommand{\@cssProcessMargins}{% + 
\eifstrequal{\cssDisplay}{block}% + {\@cssBlockEndPar\@cssBlockMargins}% + {\eifstrequal{\cssDisplay}{block-inline}% + {\@cssBlockMargins}% + {\@cssInlineMargins}% + }% +} + +\newcommand{\@cssBlockEndPar}{% + \cssIfHasClass{para-continue,para-block}{}{\cssDoAfter{\relax\ifhmode\par\global\hangindent=0pt\fi}}% +} + +\newif\if@hasdim + +\newlength{\cssHeightFull} % height including padding and border +\newlength{\cssWidthFull} % width including padding and border +\newLength{\@cssMarginAfter}{0pt} +\newLength{\@cssParSkip}{\parskip} +\newLength{\@cssParIndent}{\parindent} +\newcommand{\@cssFixMathSpacing}{\strut\vspace{-\baselineskip}} % fixes weird abovedisplay skip spacing +\newcommand{\@cssBlockMargins}{% + \@hasdimfalse + \ifdim\cssWidth=1sp\setlength{\cssWidthFull}{1sp}\else\@hasdimtrue\fi + \ifdim\cssHeight=1sp\setlength{\cssHeightFull}{1sp}\else\@hasdimtrue\fi + \if@hasdim% + % set full height and width + \setlength{\cssWidthFull}{\dimexpr\cssWidth+\cssPaddingLeft+\cssPaddingRight\relax}% + \eifstrequal{\cssBorderLeftStyle}{none}{}{\addtolength{\cssWidthFull}{\cssBorderWidth}}% + \eifstrequal{\cssBorderRightStyle}{none}{}{\addtolength{\cssWidthFull}{\cssBorderWidth}}% + \setlength{\cssHeightFull}{\dimexpr\cssHeight+\cssPaddingTop+\cssPaddingBottom\relax}% + \eifstrequal{\cssBorderTopStyle}{none}{}{\addtolength{\cssHeightFull}{\cssBorderWidth}}% + \eifstrequal{\cssBorderBottomStyle}{none}{}{\addtolength{\cssHeightFull}{\cssBorderWidth}}% + % set default width? 
+ \ifdim\cssWidth=1sp% in this case, cssWidthFull is just padding and borders + \setlength{\cssWidth}{\dimexpr\linewidth-\cssWidthFull-\cssMarginLeft-\cssMarginRight}% + \addtolength{\cssWidthFull}{\cssWidth}% + \fi% + %minipage + \ifdim\cssMarginTop=0pt\else\cssDoBefore{\vspace{\cssMarginTop}}\fi + \ifdim\cssMarginLeft=0pt\else\cssDoBefore{\hspace*{\cssMarginLeft}}\fi + \setlength{\@cssParIndent}{\parindent}% save parskip and parindent since minipage resets it + \setlength{\@cssParSkip}{\parskip}% + \eifstrequal{\cssVerticalAlign}{bottom}{\def\@cssValign{b}}% + {\eifstrequal{\cssVerticalAlign}{center}{\def\@cssValign{c}}% + {\eifstrequal{\cssVerticalAlign}{top}{\def\@cssValign{t}}% + {\def\@cssValign{c}}}}% including `middle` + \ifdim\cssHeight=1sp% + \cssDoBefore{\begin{minipage}[\@cssValign]{\cssWidthFull}}% + \else + \cssDoBefore{\begin{minipage}[\@cssValign][\cssHeightFull]{\cssWidthFull}}% + \fi + \cssDoBefore{\cssSaveFootnotes\setlength{\parskip}{\@cssParSkip}\setlength{\parindent}{\@cssParIndent}}% + %note: DoAfter prepends, so in opposite order + \ifdim\cssMarginBottom=0pt\else\cssDoAfter{\vspace{\cssMarginBottom}}\fi + \ifdim\cssMarginRight=0pt\else\cssDoAfter{\hspace*{\cssMarginRight}}\fi + \cssDoAfter{\end{minipage}\cssRestoreFootnotes}% + \else + % no height/width: trivlist + \@hasdimfalse + \ifdim\cssMarginLeft=0pt\else\@hasdimtrue\fi + \ifdim\cssMarginRight=0pt\else\@hasdimtrue\fi + \ifdim\cssMarginTop=0pt\else\@hasdimtrue\fi + \ifdim\cssMarginBottom=0pt\else\@hasdimtrue\fi + \if@hasdim + \setlength{\@cssMarginAfter}{\dimexpr\cssMarginBottom-\cssMarginTop\relax}% + \cssDoEnvArgII{list}{}{% + \leftmargin=\cssMarginLeft% + \rightmargin=\cssMarginRight% + \topsep=\cssMarginTop% + \itemsep=0pt% + \parsep=0pt% + \parskip=0pt% + \partopsep=0pt% + \listparindent=\parindent% + }% + \ifdim\@cssMarginAfter=0pt\else\cssDoAfter{\vspace{\@cssMarginAfter}}\fi% + \cssIfHasClass{math-display}% + {\cssDoBefore{\item\@cssFixMathSpacing}}% + {\cssDoBefore{\item}}% \fi 
+ \fi + \fi +} + +\newcommand{\@cssHide}[1]{} +\newcommand{\@cssInlineMargins}{% + \ifdim\cssMarginLeft=0pt\else\cssDoBefore{\hspace*{\cssMarginLeft}}\fi + \ifdim\cssMarginRight=0pt\else\cssDoAfter{\hspace*{\cssMarginRight}}\fi + \ifdim\cssMarginBottom=0pt\else\cssDoBefore{\rule[-\cssMarginBottom]{0pt}{\cssMarginBottom}}\fi + \ifdim\cssMarginTop=0pt\else\cssDoBefore{\rule{0pt}{\dimexpr\baselineskip*0.7+\cssMarginTop\relax}}\fi + \eifstrequal{\cssDisplay}{hidden}{% + \cssWrapCmd{\@cssHide}% + }{}% +} + +%------------------------------------------------------------- +% css: Borders and padding +%------------------------------------------------------------- + +\cssNewLengthKey{css}{padding-left}{\cssPaddingLeft} +\cssNewLengthKey{css}{padding-right}{\cssPaddingRight} +\cssNewLengthKey{css}{padding-top}{\cssPaddingTop} +\cssNewLengthKey{css}{padding-bottom}{\cssPaddingBottom} + +\newlength{\cssBorderWidthTotal} +\cssNewLengthKey[\cssPixel]{css}{border-width}{\cssBorderWidth} +\cssNewKey{css}{border-color}{\cssBorderColor}{black} +\cssNewKey{css}{border-top-style}{\cssBorderTopStyle}{none} +\cssNewKey{css}{border-bottom-style}{\cssBorderBottomStyle}{none} +\cssNewKey{css}{border-left-style}{\cssBorderLeftStyle}{none} +\cssNewKey{css}{border-right-style}{\cssBorderRightStyle}{none} +\cssNewKey{css}{background-color}{\cssBackgroundColor}{white} + +\cssNewPseudoKey{css}{padding}{padding-top=#1,padding-bottom=#1,padding-right=#1,padding-left=#1} + +\cssNewPseudoKey{css}{border-style}% + {border-top-style=#1,border-bottom-style=#1,border-left-style=#1,border-right-style=#1} + +\newcommand{\@cssProcessPadding}{% + \eifstrequal{\cssDisplay}{block}% + {\@cssBlockPadding}% + {\eifstrequal{\cssDisplay}{block-inline}% + {\@cssBlockPadding}% + {\@cssInlinePadding}% + }} + +% Special math-framed environment that fixes vertical spacing around math display +\newenvironment{mdmathframed}[1][]% + {\begin{mdframed}[#1]\@cssFixMathSpacing}% + {\@cssFixMathSpacing\end{mdframed}} + + 
+\newif\if@needframe +\newLength{\@cssPaddingLength}{0pt} +\newcommand{\@cssFramedArgs}{} +\newcommand{\@cssBorderStyleAll}{} +\newcommand{\@cssBlockPadding}{% + \@needframefalse% + \eifstrequal{\cssBorderTopStyle}{none}{}{\@needframetrue}% + \eifstrequal{\cssBorderBottomStyle}{none}{}{\@needframetrue}% + \eifstrequal{\cssBorderLeftStyle}{none}{}{\@needframetrue}% + \eifstrequal{\cssBorderRightStyle}{none}{}{\@needframetrue}% + \eifstrequal{\cssBackgroundColor}{white}{}{\@needframetrue}% + \ifdim\cssPaddingTop=0pt\else\@needframetrue\fi + \ifdim\cssPaddingBottom=0pt\else\@needframetrue\fi + \ifdim\cssPaddingLeft=0pt\else\@needframetrue\fi + \ifdim\cssPaddingRight=0pt\else\@needframetrue\fi + \strsplit{\cssBackgroundColor}% + \eifstrequal{\strhead}{\#}% + {\definecolor{Temp}{HTML}{\strtail}\edef\@@bcolor{Temp}}% + {\cssDefNormalizeColor{@bcolor}{\cssBackgroundColor}\edef\@@bcolor{\@bcolor}}% + %\expandafter\lowercase\expandafter{\expandafter\def\expandafter\bcolor\expandafter{\cssBackgroundColor}}% + \if@needframe% + \cssDoAfter{\cssRestoreFootnotes}% first, because post commands are pre-pended + \renewcommand{\@cssFramedArgs}{% + innertopmargin=\the\cssPaddingTop,% + innerbottommargin=\the\cssPaddingBottom,% + innerleftmargin=\the\cssPaddingLeft,% + innerrightmargin=\the\cssPaddingRight,% + linewidth=\the\cssBorderWidth,% + linecolor=\cssBorderColor,% + backgroundcolor=\@@bcolor% + }% + \setlength{\cssBorderWidthTotal}{0pt}% + \eifstrequal{\cssBorderTopStyle}{none}{\appto{\@cssFramedArgs}{,topline=false}}{}% + \eifstrequal{\cssBorderBottomStyle}{none}{\appto{\@cssFramedArgs}{,bottomline=false}}{}% + \eifstrequal{\cssBorderLeftStyle}{none}{\appto{\@cssFramedArgs}{,leftline=false}}% + {\addtolength{\cssBorderWidthTotal}{\cssBorderWidth}}% + \eifstrequal{\cssBorderRightStyle}{none}{\appto{\@cssFramedArgs}{,rightline=false}}% + {\addtolength{\cssBorderWidthTotal}{\cssBorderWidth}}% + \cssIfHasClass{math-display}% + 
{\@expandafter{\cssDoEnvOpt{mdmathframed}}{\@cssFramedArgs}}% + {\@expandafter{\cssDoEnvOpt{mdframed}}{\@cssFramedArgs}}% + % insert a minipage if height or width was set so the frame is as large + \@hasdimfalse + \ifdim\cssWidth=1sp\else\@hasdimtrue\fi + \ifdim\cssHeight=1sp\else\@hasdimtrue\fi + \if@hasdim% + \ifdim\cssHeight=1sp% + \cssDoBefore{\begin{minipage}{\cssWidth}}% + \else + \cssDoBefore{\begin{minipage}[t][\cssHeight]{\cssWidth}}% + \fi + \cssDoBefore{\setlength{\parskip}{\@cssParSkip}\setlength{\parindent}{\@cssParIndent}}% + %note: DoAfter prepends, so in opposite order + \cssDoAfter{\end{minipage}}% + \fi + \cssDoBefore{\cssSaveFootnotes}% + \fi +} + +\newcommand{\@robustFramebox}[2]{% + \eifstrequal{\cssTextAlign}{center}{\framebox[#1][c]{#2}}% + {\eifstrequal{\cssTextAlign}{right}{\framebox[#1][r]{#2}}% + {\framebox[#1][l]{#2}}}% +} + +\newcommand{\@robustMakebox}[2]{% + \eifstrequal{\cssDisplay}{table-cell}% + {\@robustTableParbox{#1}{#2}}% + {\eifstrequal{\cssTextAlign}{center}{\makebox[#1][c]{#2}}% + {\eifstrequal{\cssTextAlign}{right}{\makebox[#1][r]{#2}}% + {\makebox[#1][l]{#2}}}}% +} + +\newcommand{\@robustRaisebox}[2]{% + \raisebox{#1}{#2}% +} + +\newcommand{\@robustHeight}[1]{% + \eifstrequal{\cssVerticalAlign}{top}% + {\raisebox{0pt}[0pt][\cssHeight]{#1}}% + {\eifstrequal{\cssVerticalAlign}{middle}% + {\raisebox{0pt}[0.5\cssHeight][0.5\cssHeight]{#1}}% + {\eifstrequal{\cssVerticalAlign}{baseline}% + {\raisebox{0pt}[\dimexpr\cssHeight-\depth\relax][\depth]{#1}}% + {\raisebox{0pt}[\cssHeight][0pt]{#1}}% bottom + }}% +} + +\newcommand{\@robustTableParbox}[2]{% + \eifstrequal{\cssVerticalAlign}{top}{\def\@cssValign{t}}% + {\eifstrequal{\cssVerticalAlign}{center}{\def\@cssValign{c}}% + {\eifstrequal{\cssVerticalAlign}{middle}{\def\@cssValign{c}}}% + {\def\@cssValign{b}}}% + \ifdim\cssHeight=1sp% + \parbox[\@cssValign]{#1}{#2}% + \else% + \parbox[\@cssValign][\cssHeight]{#1}{#2}% + \fi% +} + +\newcommand{\@cssInlinePadding}{% + 
\eifstrequal{\cssBackgroundColor}{}{}% + {\eifstrequal{\cssBackgroundColor}{white}{}% + {\strsplit{\cssBackgroundColor}% + \eifstrequal{\strhead}{\#}% + {\cssWrapCmd{\protect\colorbox[HTML]{\strtail}}}% + {\cssWrapCmd{\@robustColorbox{\cssBackgroundColor}}}% + }% + }% + \@needframefalse% + \eifstrequal{\cssBorderTopStyle}{none}{}{\@needframetrue}% + \eifstrequal{\cssBorderBottomStyle}{none}{}{\@needframetrue}% + \eifstrequal{\cssBorderLeftStyle}{none}{}{\@needframetrue}% + \eifstrequal{\cssBorderRightStyle}{none}{}{\@needframetrue}% + \if@needframe% + \setlength{\fboxrule}{\cssBorderWidth}% + \ifdim\cssWidth=1sp% + \cssWrapCmd{\fbox}% + \else + \cssWrapCmd{\@robustFramebox{\cssWidth}}% + \fi + \else + \ifdim\cssWidth=1sp\else + \cssWrapCmd{\@robustMakebox{\cssWidth}}% + \fi + \fi + % height? + \ifdim\cssHeight=1sp\else\cssWrapCmd{\@robustHeight}\fi + % raisebox? + \eifstrequal{\cssDisplay}{inline}{% + \eifstrequal{\cssVerticalAlign}{}{}% + {\eifstrequal{\cssVerticalAlign}{top}{}% + {\eifstrequal{\cssVerticalAlign}{bottom}{}% + {\eifstrequal{\cssVerticalAlign}{middle}{}% + {\eifstrequal{\cssVerticalAlign}{baseline}{}% + {\cssWrapCmd{\@robustRaisebox{\cssVerticalAlign}}% + }}}}}% + }{}% + % padding + \if@needframe + \setlength{\fboxsep}{\cssPaddingTop}% todo: define our own box so we can set paddingtop/bot separately + \ifdim\cssPaddingBottom>\fboxsep\setlength{\fboxsep}{\cssPaddingBottom}\fi + \ifdim\cssPaddingLeft=\fboxsep\else\hspace*{\dimexpr\cssPaddingLeft-\fboxsep\relax}\fi + \ifdim\cssPaddingRight=\fboxsep\else\hspace*{\dimexpr\cssPaddingRight-\fboxsep\relax}\fi + \else + \ifdim\cssPaddingLeft=0pt\else\cssDoBefore{\hspace*{\cssPaddingLeft}}\fi + \ifdim\cssPaddingRight=0pt\else\cssDoAfter{\hspace*{\cssPaddingRight}}\fi + \ifdim\cssPaddingBottom=0pt\else\cssDoBefore{\protect\rule[-\cssPaddingBottom]{0pt}{\cssPaddingBottom}}\fi + \ifdim\cssPaddingTop=0pt\else\cssDoBefore{\protect\rule{0pt}{\dimexpr\cssPaddingTop+0.8em\relax}}\fi + \fi +} + 
+%------------------------------------------------------------- +% css: Textalign, textindent etc +%------------------------------------------------------------- + +\cssNewLengthKey{css}{text-indent}{\cssTextIndent} +\cssNewKey{css}{text-align}{\cssTextAlign}{justify} +\cssNewLengthKey{css}{line-height}{\cssLineHeight} +\cssNewKey{css}{float}{\cssFloat}{} + +\DeclareRobustCommand{\@robustColor}[1]{% + \cssDefNormalizeColor{@fcolor}{#1}\color{\@fcolor}% +} +\DeclareRobustCommand{\@robustColorbox}[2]{% + \cssDefNormalizeColor{@bcolor}{#1}\colorbox{\@bcolor}{#2}% +} + + +\newcommand{\@cssProcessText}{% + \eifstrequal{\cssDisplay}{block}% + {\@cssBlockText}% + {\eifstrequal{\cssDisplay}{block-inline}% + {\@cssBlockText}% + {\eifstrequal{\cssDisplay}{table-cell}% + {\@cssBlockText}% + {\@cssInlineText}% + }}} + +\newcommand{\@cssBlockText}{% + \eifstrequal{\cssId}{}{}{\label{\cssId}}% set label + \eifstrequal{\cssTextAlign}{left}% + {\cssDoBefore{\protect\raggedright}}% + {\eifstrequal{\cssTextAlign}{right}% + {\cssDoBefore{\protect\raggedleft}}% + {\eifstrequal{\cssTextAlign}{center}% + {\cssDoBefore{\protect\centering}}% + {}}}% + \ifdim\cssLineHeight=0pt\else\setlength{\baselineskip}{\cssLineHeight}\fi + \noindent\ifdim\cssTextIndent=0pt\else\hspace*{\cssTextIndent}\fi +} + +\newcommand{\@cssInlineText}{% + \eifstrequal{\cssId}{}{}{\label{\cssId}}% set label + \eifstrequal{\cssFloat}{left}% + {\cssDoAfter{\hspace*{\fill}}}% + {\eifstrequal{\cssFloat}{right}% + {\cssDoBefore{\hspace*{\fill}}}% + {\eifstrequal{\cssFloat}{center}% + {\cssDoAfter{\hspace*{\fill}}\cssDoBefore{\hspace*{\fill}}}% + {}}}% + \ifdim\cssLineHeight=0pt\else\cssDoBefore{\rule{0pt}{\cssLineHeight}}\fi +} + +%------------------------------------------------------------- +% css: Font attributes +%------------------------------------------------------------- + +\cssNewKey{css}{font-weight}{\cssFontWeight}{} +\cssNewKey{css}{font-variant}{\cssFontVariant}{} 
+\cssNewKey{css}{font-style}{\cssFontStyle}{} +\cssNewKey{css}{font-size}{\cssFontSize}{} +\cssNewKey{css}{font-family}{\cssFontFamily}{} +\cssNewKey{css}{color}{\cssColor}{} +\cssNewKey{css}{penalty}{\cssPenalty}{} + +\newcommand{\@cssProcessFont}{% + % font family + \edef\@fontFamily{\cssFontFamily}% + \@for\@ii:=\cssFontFamily\do{% find the last argument in a comma separated list + \edef\@fontFamily{\@ii}% + }% + \eifstrequal{\@fontFamily}{}{}% quick test + {\eifstrequal{\@fontFamily}{monospace}% + {\cssDoBefore\ttfamily}% + {\eifstrequal{\@fontFamily}{serif}% + {\cssDoBefore\rmfamily}% + {\eifstrequal{\@fontFamily}{sans-serif}% + {\cssDoBefore\sffamily}% + {\eifstrequal{\@fontFamily}{normal}% + {\cssDoBefore\rmfamily}% + {\cssDoBefore{\fontspec{\@fontFamily}}}% + }}}}% + % + \eifstrequal{\cssFontWeight}{bold}% + {\cssDoBefore\bfseries}% + {\eifstrequal{\cssFontWeight}{normal}% + {\cssDoBefore\mdseries}% + {}}% + \eifstrequal{\cssFontVariant}{small-caps}% + {\cssDoBefore\scshape}% + {\eifstrequal{\cssFontVariant}{normal}% + {\cssDoBefore\upshape}% + {}}% + \eifstrequal{\cssFontStyle}{italic}% + {\cssDoBefore\itshape\hspace{-0.2ex}}% + {\eifstrequal{\cssFontStyle}{oblique}% + {\cssDoBefore\slshape}% + {\eifstrequal{\cssFontStyle}{normal}% + {\cssDoBefore\upshape}% + {}}}% + \eifstrequal{\cssFontSize}{}{}% quick test + {\eifstrequal{\cssFontSize}{xx-small}% + {\cssDoBefore\tiny}% + {\eifstrequal{\cssFontSize}{x-small}% + {\cssDoBefore\scriptsize}% + {\eifstrequal{\cssFontSize}{small}% + {\cssDoBefore\small}% + {\eifstrequal{\cssFontSize}{medium}% + {\cssDoBefore\normalsize}% + {\eifstrequal{\cssFontSize}{large}% + {\cssDoBefore\large}% + {\eifstrequal{\cssFontSize}{x-large}% + {\cssDoBefore\Large}% + {\eifstrequal{\cssFontSize}{xx-large}% + {\cssDoBefore\LARGE}% + {}}}}}}}}% + % + \eifstrequal{\cssColor}{}{}% + {\strsplit{\cssColor}% + \eifstrequal{\strhead}{\#}% + {\cssDoBefore{\protect\color[HTML]{\strtail}}}% + {\cssDoBefore{\@robustColor{\cssColor}}}% + }% + % 
+  \eifstrequal{\cssPenalty}{}{}%
+  {\penalty \cssPenalty\relax}%
+}
+
+
+
+
+%-------------------------------------------------------------
+% Generic css rules for certain classes, ids, or elements
+%-------------------------------------------------------------
+\newcommand{\cssRule}[3]{%
+  \@for\@ii:=#2\do{%
+    \csappto{@rule@#1@\@ii}{,#3}%
+  }%
+}%
+
+\newcommand{\cssRuleDo}[3]{%
+  \@for\@ii:=#2\do{%
+    \csappto{@ruleDo@#1@\@ii}{#3}%
+  }%
+}%
+
+
+\newcommand{\@cssApplyRulesFor}[3]{%
+  \@for\@ii:=#3\do{%
+    \ifcsmacro{@rule@#1@\@ii}{%
+      \edef\@args{\csname @rule@#1@\@ii\endcsname}%
+      \@expandafter{\setkeys{#2}}{\@args}%
+    }{}%
+  }%
+}
+
+\newcommand{\@cssApplyDoRulesFor}[3]{%
+  \@for\@ii:=#3\do{%
+    \ifcsmacro{@ruleDo@#1@\@ii}{%
+      \csname @ruleDo@#1@\@ii\endcsname%
+    }{}%
+  }%
+}
+
+\newcommand{\cssIfHasClass}[3]{%
+  \def\@found{}%
+  \@for\@ii:=\cssClass\do{%
+    \@for\@cname:=#1\do{%
+      \eeifstrequal{\@ii}{\@cname}{%
+        \def\@found{true}%
+      }{}%
+    }%
+  }%
+  \ifdefvoid{\@found}{#3}{#2}%
+}
+
+
+\newcommand{\cssClassRule}[2]{\cssRule{class}{#1}{#2}}
+\newcommand{\cssElemRule}[2]{\cssRule{elem}{#1}{#2}}
+\newcommand{\cssIdRule}[2]{\cssRule{id}{#1}{#2}}
+
+\cssNewListKeyX{cssx}{class}{\cssClass}{}{\@cssApplyRulesFor{class}{css}}
+\cssNewKeyX{cssx}{elem}{\cssElem}{}{\@cssApplyRulesFor{elem}{css}}
+\cssNewKeyX{cssx}{id}{\cssId}{}{\@cssApplyRulesFor{id}{css}}
+
+
+\newcommand{\cssClassRuleDo}[2]{\cssRuleDo{class}{#1}{#2}}
+\newcommand{\cssClassRuleCmd}[2]{\cssClassRuleDo{#1}{\cssWrapCmd{#2}}}
+\newcommand{\cssClassRuleDoBefore}[2]{\cssClassRuleDo{#1}{\cssDoBefore{#2}}}
+\newcommand{\cssClassRuleDoAfter}[2]{\cssClassRuleDo{#1}{\cssDoAfter{#2}}}
+\newcommand{\cssClassRuleEnv}[2]{\cssClassRuleDoBefore{#1}{\begin{#2}}\cssClassRuleDoAfter{#1}{\end{#2}}}
+
+\newcommand{\cssElemRuleDo}[2]{\cssRuleDo{elem}{#1}{#2}}
+\newcommand{\cssElemRuleCmd}[2]{\cssElemRuleDo{#1}{\cssWrapCmd{#2}}}
+\newcommand{\cssElemRuleDoBefore}[2]{\cssElemRuleDo{#1}{\cssDoBefore{#2}}}
+\newcommand{\cssElemRuleDoAfter}[2]{\cssElemRuleDo{#1}{\cssDoAfter{#2}}} +\newcommand{\cssElemRuleEnv}[2]{\cssElemRuleDo{#1}{\cssDoEnv{#2}}} + +\newcommand{\@cssClassDoRules}{\@cssApplyDoRulesFor{class}{css}{\cssClass}} +\newcommand{\@cssElemDoRules}{% + \@cssApplyDoRulesFor{elem}{css}{\cssElem}% + \@cssApplyDoRulesFor{id}{css}{\cssId}% +} + +\newcommand{\cssParentClass}{} +\newcommand{\cssParentClassRule}[2]{\cssRule{parentclass}{#1}{#2}} + + +%------------------------------------------------------------- +% +%------------------------------------------------------------- + +\newenvironment{cssBlockX}[2]% + {\@cssReset\@cssUseEnv\@cssProcessAttrs{#1}{#2}% + \@cssElemDoRules% + \@cssProcessMargins\@cssProcessPadding% + \@cssClassDoRules% + \@cssProcessText\@cssProcessFont% + \@cssApplyBefore}% + {\@cssApplyAfter} + +\newenvironment{cssBlock}[1][]% + {\begin{cssBlockX}{}{#1}}{\end{cssBlockX}} + +\newcommand{\cssInlineX}[4]% + {\@cssReset\@cssUseCmd\@cssProcessAttrs{display=inline,#1}{#2}% + \@cssElemDoRules% + \@cssProcessMargins\@cssProcessPadding% + \@cssClassDoRules% + #3% + \@cssProcessText\@cssProcessFont% + \@cssApplyCmd{#4}% + }% + +\newcommand{\cssInline}[2][]{\cssInlineX{}{#1}{}{#2}} +\newcommand{\cssInlineCmd}[3][]{\cssInlineX{}{#1}{\cssWrapCmd{#2}}{#3}} + +\newcommand{\cssNewBlockElem}[3]{% + \newenvironment{#1}[1][]{\begin{cssBlockX}{elem=#2,#3}{##1}}{\end{cssBlockX}}} + +\newcommand{\cssNewInlineElem}[3]{% + \newcommand{#1}[2][]{\cssInlineX{elem=#2,#3}{##1}{}{##2}}} + + +\newcommand{\cssNewInlineElemCmd}[4]{% + \newcommand{#1}[2][]{\cssInlineX{elem=#2,#3}{##1}{\cssWrapCmd{#4}}{##2}}} + +\newcommand{\cssInitKeys}[1]{% + \@cssReset\@cssUseCmd\@cssProcessAttrs{display=inline}{#1}% +} + +% cssText is just for font attributes; no padding or margins +\newcommand{\cssTextX}[2]% + {\@cssReset\@cssUseCmd\@cssProcessAttrs{display=inline}{#1}% + \@cssElemDoRules% + %\@cssProcessMargins\@cssProcessPadding% + \@cssClassDoRules% + \@cssProcessText\@cssProcessFont% + 
\@cssApplyCmd{#2}% + }% + + +\newcommand{\cssText}[2][]{\cssTextX{#1}{#2}} diff -Nru dafny-1.9.5/Docs/DafnyRef/DafnyRef.bib dafny-1.9.7/Docs/DafnyRef/DafnyRef.bib --- dafny-1.9.5/Docs/DafnyRef/DafnyRef.bib 1970-01-01 00:00:00.000000000 +0000 +++ dafny-1.9.7/Docs/DafnyRef/DafnyRef.bib 2016-06-05 21:11:14.000000000 +0000 @@ -0,0 +1,48 @@ +@Misc{Rise4fun:dafny, + author = {K. Rustan M. Leino}, + title = {Try Dafny In Your Browser}, + note = "Available at \url{http://rise4fun.com/Dafny}" +} +@Misc{MSR:dafny:main, + author = {K. Rustan M. Leino}, + title = {Main Microsoft Research Dafny Web page}, + note = "Available at \url{http://research.microsoft.com/en-us/projects/dafny}" +} +@Misc{MSR:dafny:source, + author = {K. Rustan M. Leino et al}, + title = {Dafny Source Code}, + note = "Available at \url{http://dafny.codeplex.com}" +} +@Misc{MSR:dafny:quickref, + author = {K. Rustan M. Leino}, + title = {Dafny Quick Reference}, + note = "Available at \url{http://research.microsoft.com/en-us/projects/dafny/reference.aspx}" +} +@Misc{Linz:Coco, + author = {Hanspeter M{\"{o}}ssenb{\"{o}}ck and Markus L{\"{o}}berbauer and Albrecht W{\"{o}}{\ss}}, + title = {The Compiler Generator Coco/R}, + howpublished = {Open source from University of Linz}, + year = 2013, + note = "Available at \url{http://www.ssw.uni-linz.ac.at/Research/Projects/Coco/}" +} +@Misc{LEINO:Dafny:Calc, + author = {K. Rustan M. Leino and Nadia Polikarpova}, + title = {Verified Calculations}, + howpublished = {Manuscript KRML 231}, + year = 2013, + note = "Available at \url{http://research.microsoft.com/en-us/um/people/leino/papers/krml231.pdf}" +} +@Misc{LEINO:Dafny:Coinduction, + author = {K. Rustan M. 
Leino and Michal Moskal}, + title = {Co-induction Simply: Automatic Co-inductive Proofs in a Program Verifier}, + howpublished = {Manuscript KRML 230}, + year = 2014, + note = "Available at \url{http://research.microsoft.com/en-us/um/people/leino/papers/krml230.pdf}" +} +@Misc{LEINO:Dafny:DynamicFrames, + author = {K. Rustan M. Leino}, + title = {Dynamic-frame specifications in Dafny}, + howpublished = {JML seminar, Dagstuhl, Germany}, + year = 2009, + note = "Available at \url{http://research.microsoft.com/en-us/um/people/leino/papers/dafny-jml-dagstuhl-2009.pptx}" +} diff -Nru dafny-1.9.5/Docs/DafnyRef/DafnyRef.mdk dafny-1.9.7/Docs/DafnyRef/DafnyRef.mdk --- dafny-1.9.5/Docs/DafnyRef/DafnyRef.mdk 1970-01-01 00:00:00.000000000 +0000 +++ dafny-1.9.7/Docs/DafnyRef/DafnyRef.mdk 2016-06-05 21:11:14.000000000 +0000 @@ -0,0 +1,8331 @@ + + +[TITLE] + +~ Abstract +This is the Dafny reference manual which describes the Dafny programming +language and how to use the Dafny verification system. +Parts of this manual are more tutorial in nature in order to help the +user understand how to do proofs with Dafny. +~ + +[TOC] + + +# Introduction + +Dafny [@Leino:Dafny:LPAR16] is a programming language with built-in specification constructs. +The Dafny static program verifier can be used to verify the functional +correctness of programs. + +The Dafny programming language is designed to support the static +verification of programs. It is imperative, sequential, supports generic +classes, methods and functions, dynamic allocation, inductive and +co-inductive datatypes, and specification constructs. The +specifications include pre- and postconditions, frame specifications +(read and write sets), and termination metrics. To further support +specifications, the language also offers updatable ghost variables, +recursive functions, and types like sets and sequences. Specifications +and ghost constructs are used only during verification; the compiler +omits them from the executable code. 
+ +The Dafny verifier is run as part of the compiler. As such, a programmer +interacts with it much in the same way as with the static type +checker—when the tool produces errors, the programmer responds by +changing the program’s type declarations, specifications, and statements. + +The easiest way to try out [Dafny is in your web browser at +rise4fun](http://rise4fun.com/Dafny)[@Rise4fun:dafny]. Once you get a bit +more serious, you may prefer to [download](http://dafny.codeplex.com/) it +to run it on your machine. Although Dafny can be run from the command +line (on Windows or other platforms), the preferred way to run it is in +Microsoft Visual Studio 2012 (or newer) or using emacs, where the Dafny +verifier runs in the background while the programmer is editing the +program. + +The Dafny verifier is powered +by [Boogie](http://research.microsoft.com/boogie) +[@Boogie:Architecture;@Leino:Boogie2-RefMan;@LeinoRuemmer:Boogie2] +and [Z3](https://github.com/z3prover)[@deMouraBjorner:Z3:overview]. + +From verified programs, the Dafny compiler produces code (`.dll` or +`.exe`) for the .NET platform via intermediate C# files. However, the +facilities for interfacing with other .NET code are minimal. + +This is the reference manual for the Dafny verification system. It is +based on the following references: +[@Leino:Dafny:LPAR16;@MSR:dafny:main; +@MSR:dafny:source;@MSR:dafny:quickref; @LEINO:Dafny:Calc; +@LEINO:Dafny:Coinduction; +and the tutorials at @Rise4fun:dafny] + +The main part of the reference manual is in top down order except for an +initial section that deals with the lowest level constructs. + +[Co-induction Simply]: http://research.microsoft.com/en-us/um/people/leino/papers/krml230.pdf "Co-induction Simply: Automatic Co-inductive Proofs in a Program Verifier" + +## Dafny Example +To give a flavor of Dafny, here is the solution to a competition problem. 
+ +``` +// VSComp 2010, problem 3, find a 0 in a linked list and return how many +// nodes were skipped until the first 0 (or end-of-list) was found. +// Rustan Leino, 18 August 2010. +// +// The difficulty in this problem lies in specifying what the return +// value 'r' denotes and in proving that the program terminates. Both of +// these are addressed by declaring a ghost field 'List' in each +// linked-list node, abstractly representing the linked-list elements +// from the node to the end of the linked list. The specification can +// now talk about that sequence of elements and can use 'r' as an index +// into the sequence, and termination can be proved from the fact that +// all sequences in Dafny are finite. +// +// We only want to deal with linked lists whose 'List' field is properly +// filled in (which can only happen in an acyclic list, for example). To +// that avail, the standard idiom in Dafny is to declare a predicate +// 'Valid()' that is true of an object when the data structure +// representing object's abstract value is properly formed. The +// definition of 'Valid()' is what one intuitively would think of as the +// ''object invariant'', and it is mentioned explicitly in method pre- +// and postconditions. As part of this standard idiom, one also declared +// a ghost variable 'Repr' that is maintained as the set of objects that +// make up the representation of the aggregate object--in this case, the +// Node itself and all its successors. 
+ +class Node { + ghost var List: seq + ghost var Repr: set + var head: int + var next: Node + + predicate Valid() + reads this, Repr + { + this in Repr && + 1 <= |List| && List[0] == head && + (next == null ==> |List| == 1) && + (next != null ==> + next in Repr && next.Repr <= Repr && this !in next.Repr && + next.Valid() && next.List == List[1..]) + } + + static method Cons(x: int, tail: Node) returns (n: Node) + requires tail == null || tail.Valid() + ensures n != null && n.Valid() + ensures if tail == null then n.List == [x] + else n.List == [x] + tail.List + { + n := new Node; + n.head, n.next := x, tail; + if (tail == null) { + n.List := [x]; + n.Repr := {n}; + } else { + n.List := [x] + tail.List; + n.Repr := {n} + tail.Repr; + } + } +} + +method Search(ll: Node) returns (r: int) + requires ll == null || ll.Valid() + ensures ll == null ==> r == 0 + ensures ll != null ==> + 0 <= r && r <= |ll.List| && + (r < |ll.List| ==> ll.List[r] == 0 && 0 !in ll.List[..r]) && + (r == |ll.List| ==> 0 !in ll.List) +{ + if (ll == null) { + r := 0; + } else { + var jj,i := ll,0; + while (jj != null && jj.head != 0) + invariant jj != null ==> jj.Valid() && i + |jj.List| == |ll.List| && + ll.List[i..] == jj.List + invariant jj == null ==> i == |ll.List| + invariant 0 !in ll.List[..i] + decreases |ll.List| - i + { + jj := jj.next; + i := i + 1; + } + r := i; + } +} + +method Main() +{ + var list: Node := null; + list := list.Cons(0, list); + list := list.Cons(5, list); + list := list.Cons(0, list); + list := list.Cons(8, list); + var r := Search(list); + print "Search returns ", r, "\n"; + assert r == 1; +} +``` + + +# Lexical and Low Level Grammar +Dafny uses the Coco/R lexer and parser generator for its lexer and parser +()[@Linz:Coco]. +The Dafny input file to Coco/R is the `Dafny.atg` file in the source tree. +A Coco/R input file consists of code written in the target language +(⪚ C#) intermixed with these special sections: + +0. 
The Characters section which defines classes of characters that are used
+  in defining the lexer (Section [#sec-character-classes]).
+1. The Tokens section which defines the lexical tokens (Section [#sec-tokens]).
+2. The Productions section which defines the grammar. The grammar productions
+are distributed in the later parts of this document in the parts where
+those constructs are explained.
+
+The grammar presented in this document was derived from the `Dafny.atg`
+file but has been simplified by removing details that, though needed by
+the parser, are not needed to understand the grammar. In particular, the
+following transformations have been performed.
+
+* The semantic actions, enclosed by "(." and ".)", were removed.
+* There are some elements in the grammar used for error recovery
+  ("SYNC"). These were removed.
+* There are some elements in the grammar for resolving conflicts
+  ("IF(b)"). These have been removed.
+* Some comments related to Coco/R parsing details have been removed.
+* A Coco/R grammar is an attributed grammar where the attributes enable
+  the productions to have input and output parameters. These attributes
+  were removed except that boolean input parameters that affect
+  the parsing are kept.
+  * In our representation we represent these
+    in a definition by giving the names of the parameters following
+    the non-terminal name. For example `entity1(allowsX)`.
+  * In the case of uses of the parameter, the common case is that the
+    parameter is just passed to a lower-level non-terminal. In that
+    case we just give the name, e.g. `entity2(allowsX)`.
+  * If we want to give an explicit value to a parameter, we specify it in
+    a keyword notation like this: `entity2(allowsX: true)`.
+  * In some cases the value to be passed depends on the grammatical context.
+    In such cases we give a description of the conditions under which the
+    parameter is true, enclosed in parentheses. 
For example: + + `FunctionSignatureOrEllipsis_(allowGhostKeyword: ("method" present))` + + means that the `allowGhostKeyword` parameter is true if the + "method" keyword was given in the associated ``FunctionDecl``. + * Where a parameter affects the parsing of a non-terminal we will + explain the effect of the parameter. + + +The names of character sets and tokens start with a lower case +letter but the names of grammar non-terminals start with +an upper-case letter. + +The grammar uses Extended BNF notation. See the [Coco/R Referenced +manual](http://www.ssw.uni-linz.ac.at/Research/Projects/Coco/Doc/UserManual.pdf) +for details. But in summary: + +* identifiers starting with a lower case letter denote +terminal symbols, +* identifiers starting with an upper case letter denote nonterminal +symbols. +* Strings denote themselves. +* `=` separates the sides of a production, ⪚ `A = a b c` +* In the Coco grammars "." terminates a production, but for readability + in this document a production starts with the defined identifier in + the left margin and may be continued on subsequent lines if they + are indented. +* `|` separates alternatives, ⪚ `a b | c | d e` means `a b` or `c or d e` +* `(` `)` groups alternatives, ⪚ (a | b) c means a c or b c +* `[ ]` option, ⪚ `[a] b` means `a b` or `b` +* `{ }` iteration (0 or more times), ⪚ `{a} b` means `b` or `a b` or `a a b` or ... +* We allow `|` inside `[ ]` and `{ }`. So `[a | b]` is short for `[(a | b)]` + and `{a | b}` is short for `{(a | b)}`. +* The first production defines the name of the grammar, in this case `Dafny`. + +In addition to the Coco rules, for the sake of readability we have adopted +these additional conventions. + +* We allow `-` to be used. `a - b` means it matches if it matches `a` but not `b`. +* To aid in explaining the grammar we have added some additional productions +that are not present in the original grammar. We name these with a trailing +underscore. 
If you inline these where they are referenced, the result should +let you reconstruct the original grammar. + +**For the convenience of the reader, any references to character sets, +tokens, or grammar non-terminals in this document are hyper-links that +will link to the definition of the entity.** + +## Character Classes +This section defines character classes used later in the token definitions. +In this section backslash is used to start an escape sequence, so for example +'\n' denotes the single linefeed character. + +```` +letter = "ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz" +```` +At present, a letter is an ASCII upper or lowercase letter. Other Unicode letters +are not supported. + +```` +digit = "0123456789" +```` +A digit is just one of the base-10 digits. + +```` +posDigit = "123456789" +```` +A ``posDigit`` is a digit, excluding 0. + +```` +hexdigit = "0123456789ABCDEFabcdef" +```` +A ``hexdigit`` character is a digit or one of the letters from 'A' to 'F' in either case. + +```` +special = "'_?" +```` +The _special_ characters are the characters in addition to alphanumeric characters +that are allowed to appear in a Dafny identifier. These are + +* `"'"` because mathematicians like to put primes on identifiers and some ML + programmers like to start names of type parameters with a "'". +* "_" because computer scientists expect to be able to have underscores in identifiers. +* "?" because it is useful to have "?" at the end of names of predicates, + e.g. "Cons?". + +```` +cr = '\r' +```` +A carriage return character. + +```` +lf = '\n' +```` +A line feed character. + +```` +tab = '\t' +```` +A tab character. + +```` +space = ' ' +```` +A space character. + +```` +nondigitIdChar = letter + special +```` +The characters that can be used in an identifier minus the digits. + +```` +idchar = nondigitIdChar + digit +```` +The characters that can be used in an identifier. 
+ +```` +nonidchar = ANY - idchar +```` +Any character except those that can be used in an identifier. + +```` +charChar = ANY - '\'' - '\\' - cr - lf +```` +Characters that can appear in a character constant. + +```` +stringChar = ANY - '"' - '\\' - cr - lf +```` +Characters that can appear in a string constant. + +```` +verbatimStringChar = ANY - '"' +```` +Characters that can appear in a verbatim string. + +### Comments +Comments are in two forms. + +* They may go from "/*" to "*/" and be nested. +* They may go from "//" to the end of the line. + +## Tokens +As with most languages, Dafny syntax is defined in two levels. First the stream +of input characters is broken up into _tokens_. Then these tokens are parsed +using the Dafny grammar. The Dafny tokens are defined in this section. + +### Reserved Words +The following reserved words appear in the Dafny grammar and may not be used +as identifiers of user-defined entities: + +```` +reservedword = + "abstract" | "array" | "as" | "assert" | "assume" | "bool" | "break" | + "calc" | "case" | "char" | "class" | "codatatype" | "colemma" | + "constructor" | "copredicate" | "datatype" | "decreases" | + "default" | "else" | "ensures" | "exists" | "extends" | "false" | + "forall" | "free" | "fresh" | "function" | "ghost" | "if" | "imap" | "import" | + "in" | "include" | "inductive" | "int" | "invariant" | "iset" | "iterator" | "label" | + "lemma" | "map" | "match" | "method" | "modifies" | "modify" | + "module" | "multiset" | "nat" | "new" | "newtype" | "null" | "object" | + "old" | "opened" | "predicate" | "print" | "protected" | + "reads" | "real" | "refines" | "requires" | "return" | "returns" | "seq" | + "set" | "static" | "string" | "then" | "this" | "trait" | "true" | "type" | + "var" | "where" | "while" | "yield" | "yields" | arrayToken + +arrayToken = "array" [ posDigit { digit }] +```` + +An ``arrayToken`` is a reserved word that denotes an array type of +given rank. 
`array` is an array type of rank 1 (aka a vector). `array2` +is the type of two-dimensional arrays, etc. + +TODO: Is "_" is reserved word? + +### Identifiers + +```` +ident = nondigitIdChar { idchar } - arraytoken - chartoken - reservedword +```` +In general Dafny identifiers are sequences of ``idChar`` characters where +the first character is a ``nondigitIdChar``. However tokens that fit this pattern +are not identifiers if they look like an array type token, a character literal, +or a reserved work. + +### Digits +```` +digits = digit {['_'] digit} +```` + +A sequence of decimal digits, possibly interspersed with underscores for readability. Example: `1_234_567`. +```` +hexdigits = "0x" hexdigit {['_'] hexdigit} +```` + +A hexadecimal constant, possibly interspersed with underscores for readability. +Example: `0xffff_ffff`. + +```` +decimaldigits = digit {['_'] digit} '.' digit {['_'] digit} +```` +A decimal fraction constant, possibly interspersed with underscores for readability. +Example: `123_456.789_123`. + +### Escaped Character +In this section the "\\" characters are literal. +```` +escapedChar = + ( "\\\'" | "\\"" | "\\\\" | "\\0" | "\\n" | "\\r" | "\\t" + | "\\u" hexdigit hexdigit hexdigit hexdigit + ) +```` + +In Dafny character or string literals escaped characters may be used +to specify the presence of the delimiting quote, or back slash, +or null, or new line, or carriage return or tab, or the +Unicode character with given hexadecimal representation. + +### Character Constant Token +```` +charToken = "'" ( charChar | escapedChar ) "'" +```` + +A character constant is enclosed by "'" and includes either a character +from the ``charChar`` set, or an escaped character. Note that although Unicode +letters are not allowed in Dafny identifiers, Dafny does support Unicode +in its character and string constants and in its data. A character +constant has type `char`. 
+ + +### String Constant Token +```` +stringToken = + '"' { stringChar | escapedChar } '"' + | '@' '"' { verbatimStringChar | '"' '"' } '"' +```` + +A string constant is either a normal string constant or a verbatim string constant. +A normal string constant is enclosed by '"' and can contain characters from the +``stringChar`` set and escapes. + +A verbatim string constant is enclosed between '@"' and '"' and can +consists of any characters (including newline characters) except that two +successive double quotes give a way to escape one quote character inside +the string. + +## Low Level Grammar Productions + +### Identifier Variations + +```` +Ident = ident +```` +The ``Ident`` non-terminal is just an ``ident`` token and represents an ordinary +identifier. + +```` +DotSuffix = + ( ident | digits | "requires" | "reads" ) +```` +When using the _dot_ notation to denote a component of a compound entity +the token following the ".", in addition to being an identifier, +can also be a natural number, or one of the keywords `requires` or `reads`. + +* Digits can be used to name fields of classes and destructors of + datatypes. For example, the built-in tuple datatypes have destructors + named 0, 1, 2, etc. Note that as a field or destructor name, internal + underscores matter, so 10 is different from 1_0. +* `m.requires` is used to denote the precondition for method m. +* `m.reads` is used to denote the things that method m may read. + +```` +NoUSIdent = ident - "_" { idChar } +```` +A ``NoUSIdent`` is an identifier except that identifiers with a **leading** +underscore are not allowed. The names of user-defined entities are +required to be ``NoUSIdent``s. We introduce more mnemonic names +for these below (e.g. ``ClassName``). + +```` +WildIdent = NoUSIdent | "_" +```` +Identifier, disallowing leading underscores, except the "wildcard" +identifier "_". When "_" appears it is replaced by a unique generated +identifier distinct from user identifiers. 
+ +### NoUSIdent Synonyms +In the productions for the declaration of user-defined entities the name of the +user-defined entity is required to be an identifier that does not start +with an underscore, i.e., a ``NoUSIdent``. To make the productions more +mnemonic, we introduce the following synonyms for ``NoUSIdent``. + +```` +ModuleName = NoUSIdent +ClassName = NoUSIdent +TraitName = NoUSIdent +DatatypeName = NoUSIdent +DatatypeMemberName = NoUSIdent +NewtypeName = NoUSIdent +NumericTypeName = NoUSIdent +SynonymTypeName = NoUSIdent +IteratorName = NoUSIdent +TypeVariableName = NoUSIdent +MethodName = NoUSIdent +FunctionName = NoUSIdent +PredicateName = NoUSIdent +CopredicateName = NoUSIdent +LabelName = NoUSIdent +AttributeName = NoUSIdent +FieldIdent = NoUSIdent +```` +A ``FieldIdent`` is one of the ways to identify a field. The other is +using digits. + +### Qualified Names +A qualified name starts with the name of the top-level entity and then is followed by +zero or more ``DotSuffix``s which denote a component. Examples: + +* `Module.MyType1` +* `MyTuple.1` +* `MyMethod.requires` + +The grammar does not actually have a production for qualified names +except in the special case of a qualified name that is known to be +a module name, i.e. a ``QualifiedModuleName``. + +### Identifier-Type Combinations +In this section, we describe some nonterminals that combine an identifier and a type. + +```` +IdentType = WildIdent ":" Type +```` +In Dafny, a variable or field is typically declared by giving its name followed by +a ``colon`` and its type. An ``IdentType`` is such a construct. + +```` +GIdentType(allowGhostKeyword) = [ "ghost" ] IdentType +```` +A ``GIdentType`` is a typed entity declaration optionally preceded by "ghost". The _ghost_ +qualifier means the entity is only used during verification but not in the generated code. +Ghost variables are useful for abstractly representing internal state in specifications. 
+If `allowGhostKeyword` is false then "ghost" is not allowed.
+
+````
+LocalIdentTypeOptional = WildIdent [ ":" Type ]
+````
+A ``LocalIdentTypeOptional`` is used when declaring local variables. In
+such a case a value may be specified for the variable in which case the
+type may be omitted because it can be inferred from the initial value.
+The initial value may also be omitted.
+
+````
+IdentTypeOptional = WildIdent [ ":" Type ]
+````
+An ``IdentTypeOptional`` is typically used in a context where the type of the identifier
+may be inferred from the context. Examples are in pattern matching or quantifiers.
+
+````
+TypeIdentOptional = [ "ghost" ] [ ( NoUSIdent | digits ) ":" ] Type
+````
+``TypeIdentOptional``s are used in ``FormalsOptionalIds``. This represents situations
+where a type is given but there may not be an identifier.
+
+````
+FormalsOptionalIds = "(" [TypeIdentOptional { "," TypeIdentOptional } ] ")"
+````
+A ``FormalsOptionalIds`` is a formal parameter list in which the types are required
+but the names of the parameters are optional. This is used in algebraic
+datatype definitions.
+
+### Numeric Literals
+````
+Nat = ( digits | hexdigits )
+````
+A ``Nat`` represents a natural number expressed in either decimal or hexadecimal.
+
+````
+Dec = ( decimaldigits )
+````
+A ``Dec`` represents a decimal fraction literal.
+
+# Programs
+````
+Dafny = { IncludeDirective_ } { TopDecl } EOF
+````
+At the top level, a Dafny program (stored as files with extension `.dfy`)
+is a set of declarations. The declarations introduce (module-level)
+methods and functions, as well as types (classes, traits, inductive and
+co-inductive datatypes, newtypes, type synonyms, opaque types, and
+iterators) and modules, where the order of introduction is irrelevant. A
+class also contains a set of declarations, introducing fields, methods,
+and functions.
+
+When asked to compile a program, Dafny looks for the existence of a
+Main() method. 
If a legal Main() method is found, the compiler will emit +a `.EXE`; otherwise, it will emit a `.DLL`. + + (If there is more than one Main(), Dafny will try to emit an .EXE, but + this may cause the C# compiler to complain. One could imagine improving + this functionality so that Dafny will produce a polite error message in + this case.) + +In order to be a legal Main() method, the following must be true: + +* The method takes no parameters +* The method is not a ghost method +* The method has no requires clause +* The method has no modifies clause +* If the method is an instance (that is, non-static) method in a class, + then the enclosing class must not declare any constructor + +Note, however, that the following are allowed: + +* The method is allowed to be an instance method as long as the enclosing + class does not declare any constructor. In this case, the runtime + system will allocate an object of the enclosing class and will invoke + Main() on it. +* The method is allowed to have `ensures` clauses +* The method is allowed to have `decreases` clauses, including a + `decreases *`. (If Main() has a `decreases *`, then its execution may + go on forever, but in the absence of a `decreases *` on Main(), Dafny + will have verified that the entire execution will eventually + terminate.) + +An invocation of Dafny may specify a number of source files. +Each Dafny file follows the grammar of the ``Dafny`` non-terminal. + +It consists of a sequence of optional _include_ directives followed by top +level declarations followed by the end of the file. + +## Include Directives +```` +IncludeDirective_ = "include" stringToken +```` + +Include directives have the form ``"include" stringToken`` where +the string token is either a normal string token or a +verbatim string token. The ``stringToken`` is interpreted as the name of +a file that will be included in the Dafny source. These included +files also obey the ``Dafny`` grammar. 
Dafny parses and processes the
+transitive closure of the original source files and all the included files,
+but will not invoke the verifier on these unless they have been listed
+explicitly on the command line.
+
+## Top Level Declarations
+````
+TopDecl = { { DeclModifier }
+  ( SubModuleDecl
+  | ClassDecl
+  | DatatypeDecl
+  | NewtypeDecl
+  | SynonymTypeDecl
+  | IteratorDecl
+  | TraitDecl
+  | ClassMemberDecl(moduleLevelDecl: true) )
+  }
+````
+Top-level declarations may appear either at the top level of a Dafny file,
+or within a ``SubModuleDecl``. A top-level declaration is one of the following
+types of declarations which are described later.
+
+The ``ClassDecl``, ``DatatypeDecl``, ``NewtypeDecl``,
+``SynonymTypeDecl``, ``IteratorDecl``, and ``TraitDecl`` declarations are
+type declarations and are described in Section [#sec-types]. Ordinarily
+``ClassMemberDecl``s appear in class declarations but they can also
+appear at the top level. In that case they are included as part of an
+implicit top-level class and are implicitly `static` (but cannot be
+declared as static). In addition a ``ClassMemberDecl`` that appears at
+the top level cannot be a ``FieldDecl``.
+
+## Declaration Modifiers
+````
+DeclModifier =
+  ( "abstract" | "ghost" | "static" | "protected"
+  | "extern" [ stringToken ]
+  )
+````
+
+Top level declarations may be preceded by zero or more declaration
+modifiers. Not all of these are allowed in all contexts.
+
+The "abstract" modifier may only be used for module declarations.
+An abstract module can leave some entities underspecified.
+Abstract modules are not compiled to C#.
+
+The ghost modifier is used to mark entities as being used for
+specification only, not for compilation to code.
+
+The static modifier is used for class members that
+are associated with the class as a whole rather than with
+an instance of the class.
+
+The protected modifier is used to control the visibility of the
+body of functions. 
+ +The extern modifier is used to alter the CompileName of +entities. The CompileName is the name for the entity +when translating to Boogie or C#. + +The following table shows modifiers that are available +for each of the kinds of declaration. In the table +we use already-ghost to denote that the item is not +allowed to have the ghost modifier because it is already +implicitly ghost. + ++--------------------------+---------------------------------------+ +| Declaration | allowed modifiers | ++--------------------------+---------------------------------------+ +| module | abstract | +| class | extern | +| trait | - | +| datatype or codatatype | - | +| field | ghost | +| newtype | - | +| synonym types | - | +| iterators | - | +| method | ghost static extern | +| lemma, colemma, comethod | already-ghost static protected | +| inductive lemma | already-ghost static | +| constructor | - | +| function (non-method) | already-ghost static protected | +| function method | already-ghost static protected extern | +| predicate (non-method) | already-ghost static protected | +| predicate method | already-ghost static protected extern | +| inductive predicate | already-ghost static protected | +| copredicate | already-ghost static protected | ++--------------------------+---------------------------------------+ + + +# Modules + +```` +SubModuleDecl = ( ModuleDefinition_ | ModuleImport_ ) +```` + +Structuring a program by breaking it into parts is an important part of +creating large programs. In Dafny, this is accomplished via _modules_. +Modules provide a way to group together related types, classes, methods, +functions, and other modules together, as well as control the scope of +declarations. Modules may import each other for code reuse, and it is +possible to abstract over modules to separate an implementation from an +interface. 
+ +## Declaring New Modules +```` +ModuleDefinition_ = "module" { Attribute } ModuleName + [ [ "exclusively" ] "refines" QualifiedModuleName ] + "{" { TopDecl } "}" +QualifiedModuleName = Ident { "." Ident } +```` +A qualified name that is known to refer to a module. + +A new module is declared with the `module` keyword, followed by the name +of the new module, and a pair of curly braces ({}) enclosing the body +of the module: + +``` +module Mod { + ... +} +``` + +A module body can consist of anything that you could put at the top +level. This includes classes, datatypes, types, methods, functions, etc. + +``` +module Mod { + class C { + var f: int + method m() + } + datatype Option = A(int) | B(int) + type T + method m() + function f(): int +} +``` + +You can also put a module inside another, in a nested fashion: + +``` +module Mod { + module Helpers { + class C { + method doIt() + var f: int + } + } +} +``` + +Then you can refer to the members of the `Helpers` module within the +`Mod` module by prefixing them with "Helpers.". For example: + +``` +module Mod { + module Helpers { ... } + method m() { + var x := new Helpers.C; + x.doIt(); + x.f := 4; + } +} +``` + +Methods and functions defined at the module level are available like +classes, with just the module name prefixing them. They are also +available in the methods and functions of the classes in the same +module. + +``` +module Mod { + module Helpers { + function method addOne(n: nat): nat { + n + 1 + } + } + method m() { + var x := 5; + x := Helpers.addOne(x); // x is now 6 + } +} +``` + +## Importing Modules +```` +ModuleImport_ = "import" ["opened" ] ModuleName + [ "=" QualifiedModuleName + | "as" QualifiedModuleName ["default" QualifiedModuleName ] + ] + [ ";" ] +```` + +Declaring new submodules is useful, but sometimes you want to refer to +things from an existing module, such as a library. In this case, you +can _import_ one module into another. 
This is done via the `import`
+keyword, and there are a few different forms, each of which has a
+different meaning. The simplest kind is the concrete import, and has
+the form `import A = B`. This declaration creates a reference to the
+module `B` (which must already exist), and binds it to the new name
+`A`. Note this new name, i.e. `A`, is only bound in the module containing
+the import declaration; it does not create a global alias. For
+example, if `Helpers` was defined outside of `Mod`, then we could import
+it:
+
+```
+module Helpers {
+  ...
+}
+module Mod {
+  import A = Helpers
+  method m() {
+    assert A.addOne(5) == 6;
+  }
+}
+```
+
+Note that inside `m()`, we have to use `A` instead of `Helpers`, as we bound
+it to a different name. The name `Helpers` is not available inside `m()`,
+as only names that have been bound inside `Mod` are available. In order
+to use the members from another module, it either has to be declared
+there with `module` or imported with `import`.
+
+We don't have to give `Helpers` a new name, though, if we don't want
+to. We can write `import Helpers = Helpers` if we want to, and Dafny
+even provides the shorthand `import Helpers` for this behavior. You
+can't bind two modules with the same name at the same time, so
+sometimes you have to use the = version to ensure the names do not
+clash.
+
+The ``QualifiedModuleName`` in the ``ModuleImport_`` starts with a
+sibling module of the importing module, or with a submodule of the
+importing module. There is no way to refer to the parent module, only
+sibling modules (and their submodules).
+
+## Opening Modules
+
+Sometimes, prefixing the members of the module you imported with the
+name is tedious and ugly, even if you select a short name when
+importing it. In this case, you can import the module as `opened`,
+which causes all of its members to be available without adding the
+module name. The `opened` keyword must immediately follow `import`, if it
+is present. 
For example, we could write the previous example as:
+
+```
+module Mod {
+  import opened Helpers
+  method m() {
+    assert addOne(5) == 6;
+  }
+}
+```
+
+When opening modules, the newly bound members will have low priority,
+so they will be hidden by local definitions. This means if you define
+a local function called `addOne`, the function from `Helpers` will no
+longer be available under that name. When modules are opened, the
+original name binding is still present however, so you can always use
+the name that was bound to get to anything that is hidden.
+
+```
+module Mod {
+  import opened Helpers
+  function addOne(n: nat): nat {
+    n - 1
+  }
+  method m() {
+    assert addOne(5) == 6; // this is now false,
+                           // as this is the function just defined
+    assert Helpers.addOne(5) == 6; // this is still true
+  }
+}
+```
+
+If you open two modules that both declare members with the same name,
+then neither member can be referred to without a module prefix, as it
+would be ambiguous which one was meant. Just opening the two modules
+is not an error, however, as long as you don't attempt to use members
+with common names. The `opened` keyword can be used with any kind of
+`import` declaration, including the module abstraction form.
+
+## Module Abstraction
+
+Sometimes, using a specific implementation is unnecessary; instead,
+all that is needed is a module that implements some interface. In
+that case, you can use an _abstract_ module import. In Dafny, this is
+written `import A as B`. This means bind the name `A` as before, but
+instead of getting the exact module `B`, you get any module which is a
+_refinement_ of `B`. Typically, the module `B` may have abstract type
+definitions, classes with bodyless methods, or otherwise be unsuitable
+to use directly. Because of the way refinement is defined, any
+refinement of `B` can be used safely. 
For example, if we start with: + +``` +module Interface { + function method addSome(n: nat): nat + ensures addSome(n) > n +} +module Mod { + import A as Interface + method m() { + assert 6 <= A.addSome(5); + } +} +``` + +then we can be more precise if we know that `addSome` actually adds +exactly one. The following module has this behavior. Further, the +postcondition is stronger, so this is actually a refinement of the +Interface module. + +``` +module Implementation { + function method addSome(n: nat): nat + ensures addSome(n) == n + 1 + { + n + 1 + } +} +``` + +We can then substitute `Implementation` for `A` in a new module, by +declaring a refinement of `Mod` which defines `A` to be `Implementation`. + +``` +module Mod2 refines Mod { + import A = Implementation + ... +} +``` + +You can also give an implementation directly, without introducing a +refinement, by giving a default to the abstract import: + +``` +module Interface { + function method addSome(n: nat): nat + ensures addSome(n) > n +} +module Mod { + import A as Interface default Implementation + method m() { + assert 6 <= A.addSome(5); + } +} +module Implementation { + function method addSome(n: nat): nat + ensures addSome(n) == n + 1 + { + n + 1 + } +} +module Mod2 refines Mod { + import A as Interface default Implementation + ... +} +``` + +Regardless of whether there is a default, the only things known about +`A` in this example is that it has a function `addSome` that returns a +strictly bigger result, so even with the default we still can't prove +that `A.addSome(5) == 6`, only that `6 <= A.addSome(5)`. + +When you refine an abstract import into a concrete one, or giving a +default, Dafny checkes that the concrete module is a +refinement of the abstract one. This means that the methods must have +compatible signatures, all the classes and datatypes with their +constructors and fields in the abstract one must be present in the +concrete one, the specifications must be compatible, etc. 
+ +## Module Ordering and Dependencies + +Dafny isn't particular about which order the modules appear in, but +they must follow some rules to be well formed. As a rule of thumb, +there should be a way to order the modules in a program such that each +only refers to things defined **before** it in the source text. That +doesn't mean the modules have to be given in that order. Dafny will +figure out that order for you, assuming you haven't made any circular +references. For example, this is pretty clearly meaningless: + +``` +import A = B +import B = A +``` + +You can have import statements at the toplevel, and you can import +modules defined at the same level: + +``` +import A = B +method m() { + A.whatever(); +} +module B { ... } +``` + +In this case, everything is well defined because we can put `B` first, +followed by the `A` import, and then finally `m()`. If there is no +ordering, then Dafny will give an error, complaining about a cyclic +dependency. + +Note that when rearranging modules and imports, they have to be kept +in the same containing module, which disallows some pathological +module structures. Also, the imports and submodules are always +considered to be first, even at the toplevel. This means that the +following is not well formed: + +``` +method doIt() { } +module M { + method m() { + doIt(); + } +} +``` + +because the module `M` must come before any other kind of members, such +as methods. To define global functions like this, you can put them in +a module (called `Globals`, say) and open it into any module that needs +its functionality. Finally, if you import via a path, such as `import A += B.C`, then this creates a dependency of `A` on `B`, as we need to know +what `B` is (is it abstract or concrete, or a refinement?). + +## Name Resolution + +When Dafny sees something like `A.B.C`, how does it know what each part +refers to? The process Dafny uses to determine what identifier +sequences like this refer to is name resolution. 
Though the rules may +seem complex, usually they do what you would expect. Dafny first looks +up the initial identifier. Depending on what the first identifier +refers to, the rest of the identifier is looked up in the appropriate +context. + +In terms of the grammar, sequences like the above are represented as +a ``NameSegment`` followed by 0 or more ``Suffix``es. A ``Suffix`` is +more general and the form shown above would be for when the +``Suffix`` is an ``AugmentedDotSuffix_``. + +The resolution is different depending on whether it is in +an expression context or a type context. + +### Expression Context Name Resolution + +The leading ``NameSegment`` is resolved using the first following +rule that succeeds. + +0. Local variables, parameters and bound variables. These are things like + `x`, `y`, and `i` in `var x;, ... returns (y: int)`, and + `forall i :: ....` The declaration chosen is the match from the + innermost matching scope. + +1. If in a class, try to match a member of the class. If the member that + is found is not static an implicit `this` is inserted. This works for + fields, functions, and methods of the current class (if in a static + context, then only static methods and functions are allowed). You can + refer to fields of the current class either as `this.f` or `f`, + assuming of course that `f` hasn't be hidden by one of the above. You + can always prefix this if needed, which cannot be hidden. (Note, a + field whose name is a string of digits must always have some prefix.) + +2. If there is no ``Suffix``, then look for a datatype constructor, if + unambiguous. Any datatypes that don't need qualification (so the + datatype name itself doesn't need a prefix), and also have a uniquely + named constructor, can be referred to just by its name. So if + `datatype List = Cons(List) | Nil` is the only datatype that declares + `Cons` and `Nil` constructors, then you can write `Cons(Cons(Nil))`. 
+
+   If the constructor name is not unique, then you need to prefix it with
+   the name of the datatype (for example `List.Cons(List.Nil)`). This is
+   done per constructor, not per datatype.
+
+3. Look for a member of the enclosing module.
+
+4. Module-level (static) functions and methods
+
+TODO: Not sure about the following paragraph.
+Opened modules are treated at each level, after the declarations in the
+current module. Opened modules only affect steps 2, 3 and 5. If an
+ambiguous name is found, an error is generated, rather than continuing
+down the list. After the first identifier, the rules are basically the
+same, except in the new context. For example, if the first identifier is
+a module, then the next identifier looks into that module. Opened modules
+only apply within the module it is opened into. When looking up into
+another module, only things explicitly declared in that module are
+considered.
+
+To resolve expression `E.id`:
+
+First resolve expression E and any type arguments.
+
+* If `E` resolved to a module `M`:
+  0. If `E.id` is not followed by any further suffixes, look for
+     unambiguous datatype constructor.
+  1. Member of module M: a sub-module (including submodules of imports),
+     class, datatype, etc.
+  2. Static function or method.
+* If `E` denotes a type:
+  3. Look up id as a member of that type
+* If `E` denotes an expression:
+  4. Let T be the type of E. Look up id in T.
+
+### Type Context Name Resolution
+
+In a type context the priority of ``NameSegment`` resolution is:
+
+1. Type parameters.
+
+2. Member of enclosing module (type name or the name of a module).
+
+To resolve expression `E.id`:
+
+* If `E` resolved to a module `M`:
+  0. Member of module M: a sub-module (including submodules of imports),
+     class, datatype, etc.
+* If `E` denotes a type:
+  1. If `allowDanglingDotName`: Return the type of `E` and the given `E.id`,
+     letting the caller try to make sense of the final dot-name.
+     TODO: I don't understand this sentence. 
What is `allowDanglingDotName`? + +# Specifications +Specifications describe logical properties of Dafny methods, functions, +lambdas, iterators and loops. They specify preconditions, postconditions, +invariants, what memory locations may be read or modified, and +termination information by means of _specification clauses_. +For each kind of specification zero or more specification +clauses (of the type accepted for that type of specification) +may be given, in any order. + +We document specifications at these levels: + +- At the lowest level are the various kinds of specification clauses, + e.g. a ``RequiresClause_``. +- Next are the specifications for entities that need them, + e.g. a ``MethodSpec``. +- At the top level are the entity declarations that include + the specifications, e.g. ``MethodDecl``. + +This section documents the first two of these in a bottom-up manner. +We first document the clauses and then the specifications +that use them. + +## Specification Clauses + +### Requires Clause + +```` +RequiresClause_ = + "requires" Expression(allowLemma: false, allowLambda: false) +```` + +The **requires** clauses specify preconditions for methods, +functions, lambda expressions and iterators. Dafny checks +that the preconditions are met at all call sites. The +callee may then assume the preconditions hold on entry. + +If no **requires** clause is specified it is taken to be `true`. + +If more than one **requires** clause is given, then the +precondition is the conjunction of all of the expressions +from all of the **requires** clauses. + +### Ensures Clause + +```` +EnsuresClause_ = + "ensures" { Attribute } Expression(allowLemma: false, allowLambda: false) +ForAllEnsuresClause_ = + "ensures" Expression(allowLemma: false, allowLambda: true) +FunctionEnsuresClause_ = + "ensures" Expression(allowLemma: false, allowLambda: false) +```` + +An **ensures** clause specifies the post condition for a +method, function or iterator. 
+ +If no **ensures** clause is specified it is taken to be `true`. + +If more than one **ensures** clause is given, then the +postcondition is the conjunction of all of the expressions +from all of the **ensures** clauses. + +TODO: In the present sources ``FunctionEnsuresClause_`` differs from +``EnsuresClause_`` only in that it is not allowed to specify +``Attribute``s. This seems like a bug and will likely +be fixed in a future version. + +### Decreases Clause +```` +DecreasesClause_(allowWildcard, allowLambda) = + "decreases" { Attribute } DecreasesList(allowWildcard, allowLambda) +FunctionDecreasesClause_(allowWildcard, allowLambda) = + "decreases" DecreasesList(allowWildcard, allowLambda) +```` + +```` +DecreasesList(allowWildcard, allowLambda) = + PossiblyWildExpression(allowLambda) + { "," PossiblyWildExpression(allowLambda) } +```` +If `allowWildcard` is false but one of the +``PossiblyWildExpression``s is a wild-card, an error is +reported. + +TODO: A ``FunctionDecreasesClause_`` is not allowed to specify +``Attribute``s. this will be fixed in a future version. + +**Decreases** clauses are used to prove termination in the +presence of recursion. if more than one **decreases** clause is given +it is as if a single **decreases** clause had been given with the +collected list of arguments. That is, + +``` +decreases A, B +decreases C, D +``` + +is equivalent to + +``` +decreases A, B, C, D +``` + +If any of the expressions in the **decreases** clause are wild (i.e. "*") +then proof of termination will be skipped. + +Termination metrics in Dafny, which are declared by **decreases** clauses, +are lexicographic tuples of expressions. At each recursive (or mutually +recursive) call to a function or method, Dafny checks that the effective +**decreases** clause of the callee is strictly smaller than the effective +**decreases** clause of the caller. + + What does "strictly smaller" mean? 
Dafny provides a built-in + well-founded order for every type and, in some cases, between types. For + example, the Boolean "false" is strictly smaller than "true", the + integer 78 is strictly smaller than 102, the set `{2,5}` is strictly + smaller than the set `{2,3,5}`, and for "s" of type `seq` where + `Color` is some inductive datatype, the color `s[0]` is strictly less than + `s` (provided `s` is nonempty). + +What does "effective decreases clause" mean? Dafny always appends a +"top" element to the lexicographic tuple given by the user. This top +element cannot be syntactically denoted in a Dafny program and it never +occurs as a run-time value either. Rather, it is a fictitious value, +which here we will denote \top, such that each value that can ever occur +in a Dafny program is strictly less than \top. Dafny sometimes also +prepends expressions to the lexicographic tuple given by the user. The +effective decreases clause is any such prefix, followed by the +user-provided decreases clause, followed by \top. We said "user-provided +decreases clause", but if the user completely omits a "decreases" clause, +then Dafny will usually make a guess at one, in which case the effective +decreases clause is any prefix followed by the guess followed by \top. +(If you're using the Dafny IDE in Visual Studio, you can hover the mouse +over the name of a recursive function or method, or the "while" keyword +for a loop, to see the "decreases" clause that Dafny guessed, if any.) + +Here is a simple but interesting example: the Fibonacci function. + +``` +function Fib(n: nat) : nat +{ + if n < 2 then n else Fib(n-2) + Fib(n-1) +} + +``` + +In this example, if you hover your mouse over the function name +you will see that Dafny has supplied a `**decreases** n` clause. + +Let's take a look at the kind of example where a mysterious-looking +decreases clause like "Rank, 0" is useful. 
+ +Consider two mutually recursive methods, `A` and `B`: +``` +method A(x: nat) +{ + B(x); +} + +method B(x: nat) +{ + if x != 0 { A(x-1); } +} +``` + +To prove termination of `A` and `B`, Dafny needs to have effective +decreases clauses for A and B such that: + +* the measure for the callee `B(x)` is strictly smaller than the measure + for the caller `A(x)`, and + +* the measure for the callee `A(x-1)` is strictly smaller than the measure + for the caller `B(x)`. + +Satisfying the second of these conditions is easy, but what about the +first? Note, for example, that declaring both `A` and `B` with "decreases x" +does not work, because that won't prove a strict decrease for the call +from `A(x)` to `B(x)`. + +Here's one possibility (for brevity, we will omit the method bodies): +``` +method A(x: nat) + decreases x, 1 + +method B(x: nat) + decreases x, 0 +``` + +For the call from `A(x)` to `B(x)`, the lexicographic tuple `"x, 0"` is +strictly smaller than `"x, 1"`, and for the call from `B(x)` to `A(x-1)`, the +lexicographic tuple `"x-1, 1"` is strictly smaller than `"x, 0"`. + + Two things to note: First, the choice of "0" and "1" as the second + components of these lexicographic tuples is rather arbitrary. It could + just as well have been "false" and "true", respectively, or the sets + `{2,5}` and `{2,3,5}`. Second, the keyword **decreases** often gives rise to + an intuitive English reading of the declaration. For example, you might + say that the recursive calls in the definition of the familiar Fibonacci + function `Fib(n)` "decreases n". But when the lexicographic tuple contains + constants, the English reading of the declaration becomes mysterious and + may give rise to questions like "how can you decrease the constant 0?". + The keyword is just that---a keyword. It says "here comes a list of + expressions that make up the lexicographic tuple we want to use for the + termination measure". 
What is important is that one effective decreases + clause is compared against another one, and it certainly makes sense to + compare something to a constant (and to compare one constant to + another). + + We can simplify things a little bit by remembering that Dafny appends + \top to the user-supplied decreases clause. For the A-and-B example, + this lets us drop the constant from the **decreases** clause of A: + +``` + method A(x: nat) + decreases x + +method B(x: nat) + decreases x, 0 +``` + +The effective decreases clause of `A` is `"x, \top"` and the effective +decreases clause of `B` is `"x, 0, \top"`. These tuples still satisfy the two +conditions `(x, 0, \top) < (x, \top)` and `(x-1, \top) < (x, 0, \top)`. And +as before, the constant "0" is arbitrary; anything less than \top (which +is any Dafny expression) would work. + +Let's take a look at one more example that better illustrates the utility +of `\top`. Consider again two mutually recursive methods, call them `Outer` +and `Inner`, representing the recursive counterparts of what iteratively +might be two nested loops: +``` +method Outer(x: nat) +{ + // set y to an arbitrary non-negative integer + var y :| 0 <= y; + Inner(x, y); +} + +method Inner(x: nat, y: nat) +{ + if y != 0 { + Inner(x, y-1); + } else if x != 0 { + Outer(x-1); + } +} +``` +The body of `Outer` uses an assign-such-that statement to represent some +computation that takes place before `Inner` is called. It sets "y" to some +arbitrary non-negative value. In a more concrete example, `Inner` would do +some work for each "y" and then continue as `Outer` on the next smaller +"x". + +Using a **decreases** clause `"x, y"` for `Inner` seems natural, but if +we don't have any bound on the size of the `"y"` computed by `Outer`, +there is no expression we can write in **decreases** clause of `Outer` +that is sure to lead to a strictly smaller value for `"y"` when `Inner` +is called. `\top` to the rescue. 
If we arrange for the effective
+decreases clause of `Outer` to be `"x, \top"` and the effective decreases
+clause for `Inner` to be `"x, y, \top"`, then we can show the strict
+decreases as required. Since `\top` is implicitly appended, the two
+decreases clauses declared in the program text can be:
+```
+method Outer(x: nat)
+  decreases x
+
+method Inner(x: nat, y: nat)
+  decreases x, y
+```
+Moreover, remember that if a function or method has no user-declared
+**decreases** clause, Dafny will make a guess. The guess is (usually)
+the list of arguments of the function/method, in the order given. These are
+exactly the decreases clauses needed here. Thus, Dafny successfully
+verifies the program without any explicit decreases clauses:
+```
+method Outer(x: nat)
+{
+  var y :| 0 <= y;
+  Inner(x, y);
+}
+
+method Inner(x: nat, y: nat)
+{
+  if y != 0 {
+    Inner(x, y-1);
+  } else if x != 0 {
+    Outer(x-1);
+  }
+}
+```
+The ingredients are simple, but the end result may seem like magic. For many users, however, there may be no magic at all -- the end result may be so natural that the user never even has to think about the fact that there was a need to prove termination in the first place.
+
+
+### Framing
+````
+FrameExpression(allowLemma, allowLambda) =
+  ( Expression(allowLemma, allowLambda) [ FrameField ]
+  | FrameField )
+````
+
+````
+FrameField = "`" Ident
+````
+
+````
+PossiblyWildFrameExpression(allowLemma) =
+  ( "*" | FrameExpression(allowLemma, allowLambda: false) )
+````
+
+Frame expressions are used to denote the set of memory locations
+that a Dafny program element may read or write. A frame
+expression is a set expression. The form `{}` (that is, the empty set)
+says that no memory locations may be modified,
+which is also the default if no **modifies** clause is given explicitly.
+
+Note that framing only applies to the heap, or memory accessed through
+references. 
Local variables are not stored on the heap, so they cannot be +mentioned (well, they are not in scope in the declaration) in reads +annotations. Note also that types like sets, sequences, and multisets are +value types, and are treated like integers or local variables. Arrays and +objects are reference types, and they are stored on the heap (though as +always there is a subtle distinction between the reference itself and the +value it points to.) + +The ``FrameField`` construct is used to specify a field of a +class object. The identifier following the back-quote is the +name of the field being referenced. +If the `FrameField` is preceded by an expression the expression +must be a reference to an object having that field. +If the `FrameField` is not preceded by an expression then +the frame expression is referring to that field of the current +object. This form is only used from a method of a class. + +The use of ``FrameField`` is discouraged as in practice it has not +been shown to either be more concise or to perform better. +Also, there's (unfortunately) no form of it for array +elements---one could imagine + +``` + modifies a`[j] +``` +Also, ``FrameField`` is not taken into consideration for +lambda expressions. + +### Reads Clause +```` +FunctionReadsClause_ = + "reads" + PossiblyWildFrameExpression (allowLemma: false) + { "," PossiblyWildFrameExpression(allowLemma: false) } +LambdaReadsClause_ = + "reads" PossiblyWildFrameExpression(allowLemma: true) +IteratorReadsClause_ = + "reads" { Attribute } + FrameExpression(allowLemma: false, allowLambda: false) + { "," FrameExpression(allowLemma: false, allowLambda: false) } +PossiblyWildExpression(allowLambda) = + ( "*" | Expression(allowLemma: false, allowLambda) ) +```` + +Functions are not allowed to have side effects but may be restricted in +what they can read. The _reading frame_ of a function (or predicate) is all +the memory locations that the function is allowed to read. 
The reason we +might limit what a function can read is so that when we write to memory, +we can be sure that functions that did not read that part of memory have +the same value they did before. For example, we might have two arrays, +one of which we know is sorted. If we did not put a reads annotation on +the sorted predicate, then when we modify the unsorted array, we cannot +determine whether the other array stopped being sorted. While we might be +able to give invariants to preserve it in this case, it gets even more +complex when manipulating data structures. In this case, framing is +essential to making the verification process feasible. + +It is not just the body of a function that is subject to **reads** +checks, but also its precondition and the **reads** clause itself. + +A reads clause can list a wildcard ("*"), which allows the enclosing +function to read anything. In many cases, and in particular in all cases +where the function is defined recursively, this makes it next to +impossible to make any use of the function. Nevertheless, as an +experimental feature, the language allows it (and it is sound). +Note that a "*" makes the rest of the frame expression irrelevant. + +A **reads** clause specifies the set of memory locations that a function, +lambda, or iterator may read. If more than one **reads** clause is given +in a specification the effective read set is the union of the sets +specified. If there are no **reads** clauses the effective read set is +empty. If `"*"` is given in a **reads** clause it means any memory may be +read. + +TODO: It would be nice if the different forms of read clauses could be +combined. In a future version the single form of read clause will allow +a list and attributes. + +### Modifies Clause + +```` +ModifiesClause_ = + "modifies" { Attribute } + FrameExpression(allowLemma: false, allowLambda: false) + { "," FrameExpression(allowLemma: false, allowLambda: false) } +```` + +Frames also affect methods. 
As you might have guessed, methods are not +required to list the things they read. Methods are allowed to read +whatever memory they like, but they are required to list which parts of +memory they modify, with a modifies annotation. They are almost identical +to their reads cousins, except they say what can be changed, rather than +what the value of the function depends on. In combination with reads, +modification restrictions allow Dafny to prove properties of code that +would otherwise be very difficult or impossible. Reads and modifies are +one of the tools that allow Dafny to work on one method at a time, +because they restrict what would otherwise be arbitrary modifications of +memory to something that Dafny can reason about. + +Note that fields of newly allocated objects can always be modified. + +It is also possible to frame what can be modified by a block statement +by means of the block form of the +[modify statement](#sec-modify-statement) (Section [#sec-modify-statement]). + +A **modifies** clause specifies the set of memory locations that a +method, iterator or loop body may modify. If more than one **modifies** +clause is given in a specification, the effective modifies set is the +union of the sets specified. If no **modifies** clause is given the +effective modifies set is empty. A loop can also have a +**modifies** clause. If none is given, the loop gets to modify anything +the enclosing context is allowed to modify. + +### Invariant Clause +```` +InvariantClause_ = + "invariant" { Attribute } + Expression(allowLemma: false, allowLambda: true) +```` + +An **invariant** clause is used to specify an invariant +for a loop. If more than one **invariant** clause is given for +a loop the effective invariant is the conjunction of +the conditions specified. + +The invariant must hold on entry to the loop. And assuming it +is valid on entry, Dafny must be able to prove that it then +holds at the end of the loop. 
+ +## Method Specification +```` +MethodSpec = + { ModifiesClause_ + | RequiresClause_ + | EnsuresClause_ + | DecreasesClause_(allowWildcard: true, allowLambda: false) + } +```` + +A method specification is zero or more **modifies**, **requires**, +**ensures** or **decreases** clauses, in any order. +A method does not have **reads** clauses because methods are allowed to +read any memory. + +## Function Specification +```` +FunctionSpec = + { RequiresClause_ + | FunctionReadsClause_ + | FunctionEnsuresClause_ + | FunctionDecreasesClause_(allowWildcard: false, allowLambda: false) + } +```` + +A function specification is zero or more **reads**, **requires**, +**ensures** or **decreases** clauses, in any order. A function +specification does not have **modifies** clauses because functions are not +allowed to modify any memory. + +## Lambda Specification +```` +LambdaSpec_ = + { LambdaReadsClause_ + | RequiresClause_ + } +```` + +A lambda specification is zero or more **reads** or **requires** clauses. +Lambda specifications do not have **ensures** clauses because the body +is never opaque. +Lambda specifications do not have **decreases** +clauses because they do not have names and thus cannot be recursive. A +lambda specification does not have **modifies** clauses because lambdas +are not allowed to modify any memory. + +## Iterator Specification +```` +IteratorSpec = + { IteratorReadsClause_ + | ModifiesClause_ + | [ "yield" ] RequiresClause_ + | [ "yield" ] EnsuresClause_ + | DecreasesClause_(allowWildcard: false, allowLambda: false) + } +```` + +An iterator specification applies both to the iterator's constructor +method and to its `MoveNext` method. The **reads** and **modifies** +clauses apply to both of them. For the **requires** and **ensures** +clauses, if `yield` is not present they apply to the constructor, +but if `yield` is present they apply to the `MoveNext` method. + +TODO: What is the meaning of a **decreases** clause on an iterator? 
+Does it apply to `MoveNext`? Make sure our description of +iterators explains these. + +TODO: What is the relationship between the post condition and +the `Valid()` predicate? + +## Loop Specification +```` +LoopSpec = + { InvariantClause_ + | DecreasesClause_(allowWildcard: true, allowLambda: true) + | ModifiesClause_ + } +```` + +A loop specification provides the information Dafny needs to +prove properties of a loop. The ``InvariantClause_`` clause +is effectively a precondition and it along with the +negation of the loop test condition provides the postcondition. +The ``DecreasesClause_`` clause is used to prove termination. + +# Types +```` +Type = DomainType [ "->" Type ] +```` +A Dafny type is a domain type (i.e. a type that can be the domain of a +function type) optionally followed by an arrow and a range type. + +```` +DomainType = + ( BoolType_ | CharType_ | NatType_ | IntType_ | RealType_ | ObjectType_ + | FiniteSetType_ | InfiniteSetType_ | MultisetType_ + | SequenceType_ | StringType_ + | FiniteMapType_ | InfiniteMapType_ | ArrayType_ + | TupleType_ | NamedType_ ) +```` +The domain types comprise the builtin scalar types, the builtin +collection types, tuple types (including as a special case +a parenthesized type) and reference types. + + +Dafny types may be categorized as either value types or reference types. + +## Value Types +The value types are those whose values do not lie in the program heap. +These are: + +* The basic scalar types: `bool`, `char`, `nat`, `int`, `real` +* The built-in collection types: `set`, `multiset`, `seq`, `string`, `map`, `imap` +* Tuple Types +* Inductive and co-inductive types + +Data items having value types are passed by value. Since they are not +considered to occupy _memory_, framing expressions do not reference them. + +## Reference Types +Dafny offers a host of _reference types_. These represent +_references_ to objects allocated dynamically in the program heap. 
To +access the members of an object, a reference to (that is, a _pointer_ +to or _object identity_ of) the object is _dereferenced_. + +The reference types are class types, traits and array types. + +The special value `null` is part of every reference +type.[^fn-nullable] + +[^fn-nullable]: This will change in a future version of Dafny that + will support both nullable and (by default) non-null reference + types. + +## Named Types +```` +NamedType_ = NameSegmentForTypeName { "." NameSegmentForTypeName } +```` + +A ``NamedType_`` is used to specify a user-defined type by name +(possibly module-qualified). Named types are introduced by +class, trait, inductive, co-inductive, synonym and opaque +type declarations. They are also used to refer to type variables. + +```` +NameSegmentForTypeName = Ident [ GenericInstantiation ] +```` +A ``NameSegmentForTypeName`` is a type name optionally followed by a +``GenericInstantiation`` which supplies type parameters to a generic +type, if needed. It is a special case of a ``NameSegment`` +(See Section [#sec-name-segment]) +that does not allow a ``HashCall``. + +The following sections describe each of these kinds of types in more detail. + +# Basic types + +Dafny offers these basic types: `bool` for booleans, `char` for +characters, `int` and `nat` for integers, and `real` for reals. + +## Booleans +```` +BoolType_ = "bool" +```` + +There are two boolean values and each has a corresponding literal in +the language: `false` and `true`. 
+ +In addition to equality (`==`) and disequality (`!=`), which are +defined on all types, type `bool` supports the following operations: + ++--------------------+------------------------------------+ +| operator | description | ++--------------------+------------------------------------+ +| `<==>` | equivalence (if and only if) | ++--------------------+------------------------------------+ +| `==>` | implication (implies) | +| `<==` | reverse implication (follows from) | ++--------------------+------------------------------------+ +| `&&` | conjunction (and) | +| [\|\|]{.monospace} | disjunction (or) | ++--------------------+------------------------------------+ +| `!` | negation (not) | ++--------------------+------------------------------------+ + +Negation is unary; the others are binary. The table shows the operators +in groups of increasing binding power, with equality binding stronger +than conjunction and disjunction, and weaker than negation. Within +each group, different operators do not associate, so parentheses need +to be used. For example, +``` +A && B || C // error +``` +would be ambiguous and instead has to be written as either +``` +(A && B) || C +``` +or +``` +A && (B || C) +``` +depending on the intended meaning. + +### Equivalence Operator +The expressions `A <==> B` and `A == B` give the same value, but note +that `<==>` is _associative_ whereas `==` is _chaining_. So, +``` +A <==> B <==> C +``` +is the same as +``` +A <==> (B <==> C) +``` +and +``` +(A <==> B) <==> C +``` +whereas +``` +A == B == C +``` +is simply a shorthand for +``` +A == B && B == C +``` + +### Conjunction and Disjunction +Conjunction is associative and so is disjunction. These operators are +_short circuiting (from left to right)_, meaning that their second +argument is evaluated only if the evaluation of the first operand does +not determine the value of the expression. 
Logically speaking, the +expression `A && B` is defined when `A` is defined and either `A` +evaluates to `false` or `B` is defined. When `A && B` is defined, its +meaning is the same as the ordinary, symmetric mathematical +conjunction ∧. The same holds for `||` and ∨. + +### Implication and Reverse Implication +Implication is _right associative_ and is short-circuiting from left +to right. Reverse implication `B <== A` is exactly the same as +`A ==> B`, but gives the ability to write the operands in the opposite +order. Consequently, reverse implication is _left associative_ and is +short-circuiting from _right to left_. To illustrate the +associativity rules, each of the following four lines expresses the +same property, for any `A`, `B`, and `C` of type `bool`: +``` +A ==> B ==> C +A ==> (B ==> C) // parentheses redundant, since ==> is right associative +C <== B <== A +(C <== B) <== A // parentheses redundant, since <== is left associative +``` +To illustrate the short-circuiting rules, note that the expression +`a.Length` is defined for an array `a` only if `a` is not `null` (see +Section [#sec-reference-types]), which means the following two +expressions are well-formed: +``` +a != null ==> 0 <= a.Length +0 <= a.Length <== a != null +``` +The contrapositive of these two expressions would be: +``` +a.Length < 0 ==> a == null // not well-formed +a == null <== a.Length < 0 // not well-formed +``` +but these expressions are not well-formed, since well-formedness +requires the left (and right, respectively) operand, `a.Length < 0`, +to be well-formed by itself. + +Implication `A ==> B` is equivalent to the disjunction `!A || B`, but +is sometimes (especially in specifications) clearer to read. Since, +`||` is short-circuiting from left to right, note that +``` +a == null || 0 <= a.Length +``` +is well-formed, whereas +``` +0 <= a.Length || a == null // not well-formed +``` +is not. 
+ +In addition, booleans support _logical quantifiers_ (forall and +exists), described in section [#sec-quantifier-expression]. + + +## Numeric types + +```` +IntType_ = "int" +RealType_ = "real" +```` + +Dafny supports _numeric types_ of two kinds, _integer-based_, which +includes the basic type `int` of all integers, and _real-based_, which +includes the basic type `real` of all real numbers. User-defined +numeric types based on `int` and `real`, called _newtypes_, are +described in Section [#sec-newtypes]. Also, the _subset type_ +`nat`, representing the non-negative subrange of `int`, is described +in Section [#sec-subset-types]. + +The language includes a literal for each non-negative integer, like +`0`, `13`, and `1985`. Integers can also be written in hexadecimal +using the prefix "`0x`", as in `0x0`, `0xD`, and `0x7c1` (always with +a lower case `x`, but the hexadecimal digits themselves are case +insensitive). Leading zeros are allowed. To form negative integers, +use the unary minus operator. + +There are also literals for some of the non-negative reals. These are +written as a decimal point with a nonempty sequence of decimal digits +on both sides. For example, `1.0`, `1609.344`, and `0.5772156649`. + +For integers (in both decimal and hexadecimal form) and reals, +any two digits in a literal may be separated by an underscore in order +to improve human readability of the literals. 
For example: +``` +1_000_000 // easier to read than 1000000 +0_12_345_6789 // strange but legal formatting of 123456789 +0x8000_0000 // same as 0x80000000 -- hex digits are often placed in groups of 4 +0.000_000_000_1 // same as 0.0000000001 -- 1 \([Ångström]{.comment-color}\) +``` + +In addition to equality and disequality, numeric types +support the following relational operations: + ++-----------------+------------------------------------+ +| operator | description | ++-----------------+------------------------------------+ +| [<]{.monospace} | less than | +| `<=` | at most | +| `>=` | at least | +| `>` | greater than | ++-----------------+------------------------------------+ + +Like equality and disequality, these operators are chaining, as long +as they are chained in the "same direction". That is, +``` +A <= B < C == D <= E +``` +is simply a shorthand for +``` +A <= B && B < C && C == D && D <= E +``` +whereas +``` +A < B > C +``` +is not allowed. + +There are also operators on each numeric type: + ++---------------+------------------------------------+ +| operator | description | ++---------------+------------------------------------+ +| `+` | addition (plus) | +| `-` | subtraction (minus) | ++---------------+------------------------------------+ +| `*` | multiplication (times) | +| `/` | division (divided by) | +| `%` | modulus (mod) | ++---------------+------------------------------------+ +| `-` | negation (unary minus) | ++---------------+------------------------------------+ + +The binary operators are left associative, and they associate with +each other in the two groups. The groups are listed in order of +increasing binding power, with equality binding more strongly than the +multiplicative operators and weaker than the unary operator. +Modulus is supported only for integer-based numeric types. Integer +division and modulus are the _Euclidean division and modulus_. 
This
+means that modulus always returns a non-negative value, regardless of the
+signs of the two operands. More precisely, for any integer `a` and
+non-zero integer `b`,
+```
+a == a / b * b + a % b
+0 <= a % b < B
+```
+where `B` denotes the absolute value of `b`.
+
+Real-based numeric types have a member `Trunc` that returns the
+_floor_ of the real value, that is, the largest integer not exceeding
+the real value. For example, the following properties hold, for any
+`r` and `r'` of type `real`:
+```
+3.14.Trunc == 3
+(-2.5).Trunc == -3
+-2.5.Trunc == -2
+real(r.Trunc) <= r
+r <= r' ==> r.Trunc <= r'.Trunc
+```
+Note in the third line that member access (like `.Trunc`) binds
+stronger than unary minus. The fourth line uses the conversion
+function `real` from `int` to `real`, as described in Section
+[#sec-numeric-conversion-operations].
+
+## Characters
+
+````
+CharType_ = "char"
+````
+
+Dafny supports a type `char` of _characters_. Character literals are
+enclosed in single quotes, as in `'D'`. Their form is described
+by the ``charToken`` nonterminal in the grammar. To write a single quote as a
+character literal, it is necessary to use an _escape sequence_.
+Escape sequences can also be used to write other characters. 
The +supported escape sequences are as follows: + ++--------------------+------------------------------------------------------------+ +| escape sequence | meaning | ++--------------------+------------------------------------------------------------+ +| `\'` | the character `'` | +| [\\\"]{.monospace} | the character [\"]{.monospace} | +| `\\` | the character `\` | +| `\0` | the null character, same as `\u0000` | +| `\n` | line feed | +| `\r` | carriage return | +| `\t` | horizontal tab | +| `\u\(_xxxx_\)` | universal character whose hexadecimal code is `\(_xxxx_\)` | ++--------------------+------------------------------------------------------------+ + +The escape sequence for a double quote is redundant, because +[\'\"\']{.monospace} and [\'\\\"\']{.monospace} denote the same +character---both forms are provided in order to support the same +escape sequences as for string literals (Section [#sec-strings]). +In the form `\u\(_xxxx_\)`, the `u` is always lower case, but the four +hexadecimal digits are case insensitive. + +Character values are ordered and can be compared using the standard +relational operators: + ++-----------------+------------------------------------+ +| operator | description | ++-----------------+------------------------------------+ +| [<]{.monospace} | less than | +| `<=` | at most | +| `>=` | at least | +| `>` | greater than | ++-----------------+------------------------------------+ + +Sequences of characters represent _strings_, as described in Section +[#sec-strings]. + +The only other operations on characters are obtaining a character +by indexing into a string, and the implicit conversion to string +when used as a parameter of a `print` statement. + +TODO: Are there any conversions between `char` values and numeric values? 
+ +# Type parameters + +```` +GenericParameters = "<" TypeVariableName [ "(" "==" ")" ] + { "," TypeVariableName [ "(" "==" ")" ] } ">" +```` +Many of the types (as well as functions and methods) in Dafny can be +parameterized by types. These _type parameters_ are typically +declared inside angle brackets and can stand for any type. + +It is sometimes necessary to restrict these type parameters so that +they can only be instantiated by certain families of types. As such, +Dafny distinguishes types that support the equality operation +not only in ghost contexts but also in compiled contexts. To indicate +that a type parameter is restricted to such _equality supporting_ +types, the name of the type parameter takes the suffix +"`(==)`".[^fn-type-mode] For example, +``` +method Compare(a: T, b: T) returns (eq: bool) +{ + if a == b { eq := true; } else { eq := false; } +} +``` +is a method whose type parameter is restricted to equality-supporting +types. Again, note that _all_ types support equality in _ghost_ +contexts; the difference is only for non-ghost (that is, compiled) +code. Co-inductive datatypes, function types, as well as inductive +datatypes with ghost parameters are examples of types that are not +equality supporting. + +[^fn-type-mode]: Being equality-supporting is just one of many + _modes_ that one can imagine types in a rich type system to have. + For example, other modes could include having a total order, + being zero-initializable, and possibly being uninhabited. If + Dafny were to support more modes in the future, the "`(\( \))`"-suffix + syntax may be extended. For now, the suffix can only indicate the + equality-supporting mode. + +Dafny has some inference support that makes certain signatures less +cluttered (described in a different part of the Dafny language +reference). In some cases, this support will +infer that a type parameter must be restricted to equality-supporting +types, in which case Dafny adds the "`(==)`" automatically. 

+
+TODO: Need to describe type inference somewhere.
+
+# Generic Instantiation
+````
+GenericInstantiation = "<" Type { "," Type } ">"
+````
+When a generic entity is used, actual types must be specified for each
+generic parameter. This is done using a ``GenericInstantiation``.
+If the `GenericInstantiation` is omitted, type inference will try
+to fill these in.
+
+# Collection types
+
+Dafny offers several built-in collection types.
+
+## Sets
+````
+FiniteSetType_ = "set" [ GenericInstantiation ]
+InfiniteSetType_ = "iset" [ GenericInstantiation ]
+````
+
+For any type `T`, each value of type `set<T>` is a finite set of
+`T` values.
+
+TODO:
+Set membership is determined by equality in the type `T`,
+so `set<T>` can be used in a non-ghost context only if `T` is equality
+supporting.
+
+For any type `T`, each value of type `iset<T>` is a potentially infinite
+set of `T` values.
+
+A set can be formed using a _set display_ expression, which is a
+possibly empty, unordered, duplicate-insensitive list of expressions
+enclosed in curly braces. To illustrate,
+```
+{} {2, 7, 5, 3} {4+2, 1+5, a*b}
+```
+are three examples of set displays. There is also a _set comprehension_
+expression (with a binder, like in logical quantifications), described in
+section [#sec-set-comprehension-expressions].
+
+In addition to equality and disequality, set types
+support the following relational operations:
+
++-----------------+------------------------------------+
+| operator | description |
++-----------------+------------------------------------+
+| [<]{.monospace} | proper subset |
+| `<=` | subset |
+| `>=` | superset |
+| `>` | proper superset |
++-----------------+------------------------------------+
+
+Like the arithmetic relational operators, these operators are
+chaining. 
+ +Sets support the following binary operators, listed in order of +increasing binding power: + ++---------------+------------------------------------+ +| operator | description | ++---------------+------------------------------------+ +| `!!` | disjointness | ++---------------+------------------------------------+ +| `+` | set union | +| `-` | set difference | ++---------------+------------------------------------+ +| `*` | set intersection | ++---------------+------------------------------------+ + +The associativity rules of `+`, `-`, and `*` are like those of the +arithmetic operators with the same names. The expression `A !! B`, +whose binding power is the same as equality (but which neither +associates nor chains with equality), says that sets `A` and `B` have +no elements in common, that is, it is equivalent to +``` +A * B == {} +``` +However, the disjointness operator is chaining, so `A !! B !! C !! D` +means: +``` +A * B == {} && (A + B) * C == {} && (A + B + C) * D == {} +``` + +In addition, for any set `s` of type `set` or `iset` and any +expression `e` of type `T`, sets support the following operations: + ++---------------------+------------------------------------+ +| expression | description | ++---------------------+------------------------------------+ +| [\|s\|]{.monospace} | set cardinality | +| `e in s` | set membership | +| `e !in s` | set non-membership | ++---------------------+------------------------------------+ + +The expression `e !in s` is a syntactic shorthand for `!(e in s)`. + +## Multisets +```` +MultisetType_ = "multiset" [ GenericInstantiation ] +```` + +A _multiset_ is similar to a set, but keeps track of the multiplicity +of each element, not just its presence or absence. For any type `T`, +each value of type `multiset` is a map from `T` values to natural +numbers denoting each element's multiplicity. Multisets in Dafny +are finite, that is, they contain a finite number of each of a finite +set of elements. 
Stated differently, a multiset maps only a finite +number of elements to non-zero (finite) multiplicities. + +Like sets, multiset membership is determined by equality in the type +`T`, so `multiset` can be used in a non-ghost context only if `T` +is equality supporting. + +A multiset can be formed using a _multiset display_ expression, which +is a possibly empty, unordered list of expressions enclosed in curly +braces after the keyword `multiset`. To illustrate, +``` +multiset{} multiset{0, 1, 1, 2, 3, 5} multiset{4+2, 1+5, a*b} +``` +are three examples of multiset displays. There is no multiset +comprehension expression. + +In addition to equality and disequality, multiset types +support the following relational operations: + ++-----------------+------------------------------------+ +| operator | description | ++-----------------+------------------------------------+ +| [<]{.monospace} | proper multiset subset | +| `<=` | multiset subset | +| `>=` | multiset superset | +| `>` | proper multiset superset | ++-----------------+------------------------------------+ + +Like the arithmetic relational operators, these operators are +chaining. + +Multisets support the following binary operators, listed in order of +increasing binding power: + ++---------------+------------------------------------+ +| operator | description | ++---------------+------------------------------------+ +| `!!` | multiset disjointness | ++---------------+------------------------------------+ +| `+` | multiset union | +| `-` | multiset difference | ++---------------+------------------------------------+ +| `*` | multiset intersection | ++---------------+------------------------------------+ + +The associativity rules of `+`, `-`, and `*` are like those of the +arithmetic operators with the same names. 
The `+` operator +adds the multiplicity of corresponding elements, the `-` operator +subtracts them (but 0 is the minimum multiplicity), +and the `*` has multiplicity that is the minimum of the +multiplicity of the operands. + +The expression `A !! B` +says that multisets `A` and `B` have no elements in common, that is, +it is equivalent to +``` +A * B == multiset{} +``` +Like the analogous set operator, `!!` is chaining. + +In addition, for any multiset `s` of type `multiset`, +expression `e` of type `T`, and non-negative integer-based numeric +`n`, multisets support the following operations: + ++---------------------+------------------------------------------+ +| expression | description | ++---------------------+------------------------------------------+ +| [\|s\|]{.monospace} | multiset cardinality | +| `e in s` | multiset membership | +| `e !in s` | multiset non-membership | +| `s[e]` | multiplicity of `e` in `s` | +| `s[e := n]` | multiset update (change of multiplicity) | ++---------------------+------------------------------------------+ + +The expression `e in s` returns `true` if and only if `s[e] != 0`. +The expression `e !in s` is a syntactic shorthand for `!(e in s)`. +The expression `s[e := n]` denotes a multiset like +`s`, but where the multiplicity of element `e` is `n`. Note that +the multiset update `s[e := 0]` results in a multiset like `s` but +without any occurrences of `e` (whether or not `s` has occurrences of +`e` in the first place). As another example, note that +`s - multiset{e}` is equivalent to: +``` +if e in s then s[e := s[e] - 1] else s +``` + +## Sequences +```` +SequenceType_ = "seq" [ GenericInstantiation ] +```` + +For any type `T`, a value of type `seq` denotes a _sequence_ of `T` +elements, that is, a mapping from a finite downward-closed set of natural +numbers (called _indices_) to `T` values. (Thinking of it as a map, +a sequence is therefore something of a dual of a multiset.) 
+ +### Sequence Displays +A sequence can be formed using a _sequence display_ expression, which +is a possibly empty, ordered list of expressions enclosed in square +brackets. To illustrate, +``` +[] [3, 1, 4, 1, 5, 9, 3] [4+2, 1+5, a*b] +``` +are three examples of sequence displays. There is no sequence +comprehension expression. + +### Sequence Relational Operators +In addition to equality and disequality, sequence types +support the following relational operations: + ++-----------------+------------------------------------+ +| operator | description | ++-----------------+------------------------------------+ +| [<]{.monospace} | proper prefix | +| `<=` | prefix | ++-----------------+------------------------------------+ + +Like the arithmetic relational operators, these operators are +chaining. Note the absence of `>` and `>=`. + +### Sequence Concatenation +Sequences support the following binary operator: + ++---------------+------------------------------------+ +| operator | description | ++---------------+------------------------------------+ +| `+` | concatenation | ++---------------+------------------------------------+ + +Operator `+` is associative, like the arithmetic operator with the +same name. 
+ +### Other Sequence Expressions +In addition, for any sequence `s` of type `seq`, expression `e` +of type `T`, integer-based numeric `i` satisfying `0 <= i < |s|`, and +integer-based numerics `lo` and `hi` satisfying +`0 <= lo <= hi <= |s|`, sequences support the following operations: + ++---------------------+----------------------------------------+ +| expression | description | ++---------------------+----------------------------------------+ +| [\|s\|]{.monospace} | sequence length | +| `s[i]` | sequence selection | +| `s[i := e]` | sequence update | +| `e in s` | sequence membership | +| `e !in s` | sequence non-membership | +| `s[lo..hi]` | subsequence | +| `s[lo..]` | drop | +| `s[..hi]` | take | +| `s[\(_slices_\)]` | slice | +| `multiset(s)` | sequence conversion to a `multiset` | ++---------------------+----------------------------------------+ + +Expression `s[i := e]` returns a sequence like `s`, except that the +element at index `i` is `e`. The expression `e in s` says there +exists an index `i` such that `s[i] == e`. It is allowed in non-ghost +contexts only if the element type `T` is equality supporting. +The expression `e !in s` is a syntactic shorthand for `!(e in s)`. + +Expression `s[lo..hi]` yields a sequence formed by taking the first +`hi` elements and then dropping the first `lo` elements. The +resulting sequence thus has length `hi - lo`. Note that `s[0..|s|]` +equals `s`. If the upper bound is omitted, it +defaults to `|s|`, so `s[lo..]` yields the sequence formed by dropping +the first `lo` elements of `s`. If the lower bound is omitted, it +defaults to `0`, so `s[..hi]` yields the sequence formed by taking the +first `hi` elements of `s`. + +In the sequence slice operation, `\(_slices_\)` is a nonempty list of +length designators separated and optionally terminated by a colon, and +there is at least one colon. Each length designator is a non-negative +integer-based numeric, whose sum is no greater than `|s|`. 
If there +are _k_ colons, the operation produces _k + 1_ consecutive subsequences +from `s`, each of the length indicated by the corresponding length +designator, and returns these as a sequence of +sequences.[^fn-slice-into-tuple] If `\(_slices_\)` is terminated by a +colon, then the length of the last slice extends until the end of `s`, +that is, its length is `|s|` minus the sum of the given length +designators. For example, the following equalities hold, for any +sequence `s` of length at least `10`: +``` +var t := [3.14, 2.7, 1.41, 1985.44, 100.0, 37.2][1:0:3]; +assert |t| == 3 && t[0] == [3.14] && t[1] == []; +assert t[2] == [2.7, 1.41, 1985.44]; +var u := [true, false, false, true][1:1:]; +assert |u| == 3 && u[0][0] && !u[1][0] && u[2] == [false, true]; +assert s[10:][0] == s[..10]; +assert s[10:][1] == s[10..]; +``` + +[^fn-slice-into-tuple]: Now that Dafny supports built-in tuples, the + plan is to change the sequence slice operation to return not a + sequence of subsequences, but a tuple of subsequences. + +The operation `multiset(s)` yields the multiset of elements of +sequence `s`. It is allowed in non-ghost contexts only if the element +type `T` is equality supporting. + +### Strings +```` +StringType_ = "string" +```` + +A special case of a sequence type is `seq`, for which Dafny +provides a synonym: `string`. Strings are like other sequences, but +provide additional syntax for sequence display expressions, namely +_string literals_. There are two forms of the syntax for string +literals: the _standard form_ and the _verbatim form_. + +String literals of the standard form are enclosed in double quotes, as +in `"Dafny"`. To include a double quote in such a string literal, +it is necessary to use an escape sequence. Escape sequences can also +be used to include other characters. The supported escape sequences +are the same as those for character literals, see Section [#sec-characters]. 
+For example, the Dafny expression `"say \"yes\""` represents the +string `'say "yes"'`. +The escape sequence for a single quote is redundant, because +[\"\'\"]{.monospace} and [\"\\\'\"]{.monospace} denote the same +string---both forms are provided in order to support the same +escape sequences as for character literals. + +String literals of the verbatim form are bracketed by +[@\"]{.monospace} and [\"]{.monospace}, as in `@"Dafny"`. To include +a double quote in such a string literal, it is necessary to use the +escape sequence [\"\"]{.monospace}, that is, to write the character +twice. In the verbatim form, there are no other escape sequences. +Even characters like newline can be written inside the string literal +(hence spanning more than one line in the program text). + +For example, the following three expressions denote the same string: +``` +"C:\\tmp.txt" +@"C:\tmp.txt" +['C', ':', '\\', 't', 'm', 'p', '.', 't', 'x', 't'] +``` + +Since strings are sequences, the relational operators [<]{.monospace} +and `<=` are defined on them. Note, however, that these operators +still denote proper prefix and prefix, respectively, not some kind of +alphabetic comparison as might be desirable, for example, when +sorting strings. + +## Finite and Infinite Maps +```` +FiniteMapType_ = "map" [ GenericInstantiation ] +InfiniteMapType_ = "imap" [ GenericInstantiation ] +```` + +For any types `T` and `U`, a value of type `map` denotes a +_(finite) map_ +from `T` to `U`. In other words, it is a look-up table indexed by +`T`. The _domain_ of the map is a finite set of `T` values that have +associated `U` values. Since the keys in the domain are compared +using equality in the type `T`, type `map` can be used in a +non-ghost context only if `T` is equality supporting. + +Similarly, for any types `T` and `U`, a value of type `imap` +denotes a _(possibly) infinite map_. In most regards, `imap` is +like `map`, but a map of type `imap` is allowed to have an +infinite domain. 
+ +A map can be formed using a _map display_ expression (see ``MapDisplayExpr``), +which is a possibly empty, ordered list of _maplets_, each maplet having the +form `t := u` where `t` is an expression of type `T` and `u` is an +expression of type `U`, enclosed in square brackets after the keyword +`map`. To illustrate, +``` +map[] map[20 := true, 3 := false, 20 := false] map[a+b := c+d] +``` +are three examples of map displays. By using the keyword `imap` +instead of `map`, the map produced will be of type `imap` +instead of `map`. Note that an infinite map (`imap`) is allowed +to have a finite domain, whereas a finite map (`map`) is not allowed +to have an infinite domain. +If the same key occurs more than +once, only the last occurrence appears in the resulting +map.[^fn-map-display] There is also a _map comprehension expression_, +explained in section [#sec-map-comprehension-expression]. + +[^fn-map-display]: This is likely to change in the future to disallow + multiple occurrences of the same key. + +For any map `fm` of type `map`, +any map `m` of type `map` or `imap`, +any expression `t` of type `T`, +any expression `u` of type `U`, and any `d` in the domain of `m` (that +is, satisfying `d in m`), maps support the following operations: + ++----------------------+------------------------------------+ +| expression | description | ++----------------------+------------------------------------+ +| [\|fm\|]{.monospace} | map cardinality | +| `m[d]` | map selection | +| `m[t := u]` | map update | +| `t in m` | map domain membership | +| `t !in m` | map domain non-membership | ++----------------------+------------------------------------+ + +`|fm|` denotes the number of mappings in `fm`, that is, the +cardinality of the domain of `fm`. Note that the cardinality operator +is not supported for infinite maps. +Expression `m[d]` returns the `U` value that `m` associates with `d`. +Expression `m[t := u]` is a map like `m`, except that the +element at key `t` is `u`. 
The expression `t in m` says `t` is in the +domain of `m` and `t !in m` is a syntactic shorthand for +`!(t in m)`.[^fn-map-membership] + +[^fn-map-membership]: This is likely to change in the future as + follows: The `in` and `!in` operations will no longer be + supported on maps. Instead, for any map `m`, `m.Domain` will + return its domain as a set and `m.Range` will return, also as a + set, the image of `m` under its domain. + +Here is a small example, where a map `cache` of type `map<real, real>` +is used to cache computed values of Joule-Thomson coefficients for +some fixed gas at a given temperature: +``` +if K in cache { // check if temperature is in domain of cache + coeff := cache[K]; // read result in cache +} else { + coeff := ComputeJouleThomsonCoefficient(K); // do expensive computation + cache := cache[K := coeff]; // update the cache +} +``` + +# Types that stand for other types +```` +SynonymTypeDecl = + ( SynonymTypeDefinition_ | OpaqueTypeDefinition_ ) [ ";" ] +```` +It is sometimes useful to know a type by several names or to treat a +type abstractly. Synonym and opaque types serve this purpose. + +## Type synonyms +```` +SynonymTypeDefinition_ = + "type" { Attribute } SynonymTypeName [ GenericParameters ] "=" Type +```` + +A _type synonym_ declaration: +``` +type Y<T> = G +``` +declares `Y<T>` to be a synonym for the type `G`. Here, `T` is a +nonempty list of type parameters (each of which is optionally +designated with the suffix "`(==)`"), which can be used as free type +variables in `G`. If the synonym has no type parameters, the "`<T>`" +is dropped. In all cases, a type synonym is just a synonym. That is, +there is never a difference, other than possibly in error messages +produced, between `Y<T>` and `G`.
+ +For example, the names of the following type synonyms may improve the +readability of a program: +``` +type Replacements = map<T,T> +type Vertex = int +``` + +As already described in Section [#sec-strings], `string` is a built-in +type synonym for `seq<char>`, as if it would have been declared as +follows: +``` +type string = seq<char> +``` + +## Opaque types +```` +OpaqueTypeDefinition_ = "type" { Attribute } SynonymTypeName + [ "(" "==" ")" ] [ GenericParameters ] +```` + +A special case of a type synonym is one that is underspecified. Such +a type is declared simply by: +``` +type Y +``` +It is known as an _opaque type_. Its definition can be revealed in a +refining module. To indicate that `Y` designates an +equality-supporting type, "`(==)`" can be written immediately +following the name "`Y`". + +For example, the declarations +``` +type T +function F(t: T): T +``` +can be used to model an uninterpreted function `F` on some +arbitrary type `T`. As another example, +``` +type Monad<T> +``` +can be used abstractly to represent an arbitrary parameterized monad. + +# Well-founded Functions and Extreme Predicates +This section is a tutorial on well-founded functions and extreme predicates. +We place it here in preparation for Section [#sec-class-types] +where function and predicate definitions are described. + +Recursive functions are a core part of computer science and mathematics. +Roughly speaking, when the definition of such a function spells out a +terminating computation from given arguments, we may refer to +it as a _well-founded function_. For example, the common factorial and +Fibonacci functions are well-founded functions. + +There are also other ways to define functions. An important case +regards the definition of a boolean function as an extreme solution +(that is, a least or greatest solution) to some equation.
For +computer scientists with interests in logic or programming languages, +these _extreme predicates_ are important because they describe the +judgments that can be justified by a given set of inference rules +(see, e.g., [@CamilleriMelham:InductiveRelations; +@Winskel:FormalSemantics; + @LeroyGrall:CoinductiveBigStep; @Pierce:SoftwareFoundations; + @NipkowKlein:ConcreteSemantics]). + +To benefit from machine-assisted reasoning, it is necessary not just +to understand extreme predicates but also to have techniques for +proving theorems about them. A foundation for this reasoning was +developed by Paulin-Mohring [@PaulinMohring:InductiveCoq] and is the +basis of the constructive logic supported by Coq [@Coq:book] as well +as other proof assistants [@BoveDybjerNorell:BriefAgda; +@SwamyEtAl:Fstar2011]. Essentially, the idea is to represent the +knowledge that an extreme predicate holds by the proof term by which +this knowledge was derived. For a predicate defined as the least +solution, such proof terms are values of an inductive datatype (that +is, finite proof trees), and for the greatest solution, a coinductive +datatype (that is, possibly infinite proof trees). This means that +one can use induction and coinduction when reasoning about these proof +trees. Therefore, these extreme predicates are known as, +respectively, _inductive predicates_ and _coinductive predicates_ (or, +_co-predicates_ for short). Support for extreme predicates is also +available in the proof assistants Isabelle [@Paulson:CADE1994] and HOL +[@Harrison:InductiveDefs]. + +Dafny supports both well-founded functions and extreme predicates. +This section is a tutorial that describes the difference in general +terms, and then describes novel syntactic support in Dafny for +defining and proving lemmas with extreme predicates. Although Dafny's +verifier has at its core a first-order SMT solver, Dafny's logical +encoding makes it possible to reason about fixpoints in an automated +way. 
+ +The encoding for coinductive predicates in Dafny was described previously +[@LeinoMoskal:Coinduction] and is here described in Section +[#sec-co-inductive-datatypes]. + +## Function Definitions + +To define a function $f \colon X \to Y$ in terms of itself, one can +write an equation like + +~ Equation {#eq-general} + f \Equal \F(f) +~ + +where $\mathcal{F}$ is a non-recursive function of type +$(X \to Y) \to X \to Y$. Because it takes a function as an argument, +$\mathcal{F}$ is referred to as a _functor_ (or _functional_, but not to be +confused by the category-theory notion of a functor). +Throughout, I will assume that $\F(f)$ by itself is well defined, +for example that it does not divide by zero. I will also assume that $f$ occurs +only in fully applied calls in $\F(f)$; eta expansion can be applied to +ensure this. If $f$ is a boolean function, that is, if $Y$ is +the type of booleans, then I call $f$ a _predicate_. + +For example, the common Fibonacci function over the +natural numbers can be defined by the equation + +~ Equation + \fib \Equal + \lambda n \bullet\; \ite{n < 2}{n}{\fib(n-2) + \fib(n-1)} +~ + +With the understanding that the argument $n$ is universally +quantified, we can write this equation equivalently as + +~ Equation {#eq-fib} + \fib(n) \Equal + \ite{n < 2}{n}{\fib(n-2) + \fib(n-1)} +~ + +The fact that the function being defined occurs on both sides of the equation +causes concern that we might not be defining the function properly, leading to a +logical inconsistency. In general, there +could be many solutions to an equation like [#eq-general] or there could be none. +Let's consider two ways to make sure we're defining the function uniquely. + +### Well-founded Functions + +A standard way to ensure that equation [#eq-general] has a unique solution in $f$ is +to make sure the recursion is well-founded, which roughly means that the +recursion terminates. 
This is done by introducing any well-founded +relation $\Less$ on the domain of $f$ and making sure that the argument to each recursive +call goes down in this ordering. More precisely, if we formulate [#eq-general] as + +~ Equation + f(x) \Equal \F'(f) +~ + +then we want to check $E \Less x$ for each call $f(E)$ in $\F'(f)$. When a function +definition satisfies this _decrement condition_, then the function is said to be +_well-founded_. + +For example, to check the decrement condition for $\fib$ in [#eq-fib], we can pick +$\Less$ to be the arithmetic less-than relation on natural numbers and check the +following, for any $n$: + +~ Equation + 2 \leq n \;\;\Imp\;\; n-2 \Less n \;\And\; n-1 \Less n +~ + +Note that we are entitled to using the antecedent $2 \leq n$, because that is the +condition under which the else branch in [#eq-fib] is evaluated. + +A well-founded function is often thought of as "terminating" in the sense +that the recursive _depth_ in evaluating $f$ +on any given argument is finite. That is, there are no infinite descending chains +of recursive calls. However, the evaluation of $f$ on a given argument +may fail to terminate, because its _width_ may be infinite. For example, let $P$ +be some predicate defined on the ordinals and let $\PDownward$ be a predicate on the +ordinals defined by the following equation: + +~ Equation + \PDownward(o) \Equal + P(o) \And \forall p \bullet\; p \Less o \Imp \PDownward(p) +~ + +With $\Less$ as the usual ordering on ordinals, this equation satisfies the decrement +condition, but evaluating $\PDownward(\omega)$ would require evaluating +$\PDownward(n)$ for every natural number $n$. However, what we are concerned +about here is to avoid mathematical inconsistencies, and that is +indeed a consequence of the decrement condition. 
+ +#### Example with Well-founded Functions {#sec-fib-example} + +So that we can later see how inductive proofs are done in Dafny, let's prove that +for any $n$, $\fib(n)$ is even iff $n$ is a multiple of $3$. +We split our task into +two cases. If $n < 2$, then the property follows directly from the definition +of $\fib$. Otherwise, note that exactly one of the three numbers $n-2$, $n-1$, and $n$ +is a multiple of 3. If $n$ is the multiple of 3, then by invoking the +induction hypothesis on $n-2$ +and $n-1$, we obtain that $\fib(n-2) + \fib(n-1)$ is the sum of two odd numbers, +which is even. If $n-2$ or $n-1$ is a multiple of 3, then by invoking the induction +hypothesis on $n-2$ and $n-1$, we obtain that $\fib(n-2) + \fib(n-1)$ is the sum of an +even number and an odd number, which is odd. In this proof, we invoked the induction +hypothesis on $n-2$ and on $n-1$. This is allowed, because both are smaller than +$n$, and hence the invocations go down in the well-founded ordering on natural numbers. + +### Extreme Solutions + +We don't need to exclude the possibility of equation [#eq-general] having multiple +solutions---instead, we can just be clear about which one of them we want. +Let's explore this, after a smidgen of lattice theory. + +For any complete lattice $(Y,\leq)$ and any set $X$, we can by _pointwise extension_ define +a complete lattice $(X \to Y, \FBelow)$, where for any $f,g \colon X \to Y$, + +~ Equation + f \FBelow g \Equiv + \forall x \bullet\; f(x) \leq g(x) +~ + +In particular, if $Y$ is the set of booleans ordered by implication ($\false \leq \true$), +then the set of predicates over any domain $X$ forms a complete lattice. +Tarski's Theorem [@Tarski:theorem] tells us that any monotonic function over a +complete lattice has a least and a greatest fixpoint. In particular, this means that +$\F$ has a least fixpoint and a greatest fixpoint, provided $\F$ is monotonic.
+ +Speaking about the _set of solutions_ in $f$ to [#eq-general] is the same as speaking +about the _set of fixpoints_ of functor $\F$. In particular, the least and greatest +solutions to [#eq-general] are the same as the least and greatest fixpoints of $\F$. +In casual speak, it happens that we say "fixpoint of [#eq-general]", or more +grotesquely, "fixpoint of $f$" when we really mean "fixpoint of $\F$". + +In conclusion of our little excursion into lattice theory, we have that, under the +proviso of $\F$ being monotonic, the set of solutions in $f$ to [#eq-general] is nonempty, +and among these solutions, there is in the $\FBelow$ ordering a least solution (that is, +a function that returns $\false$ more often than any other) and a greatest solution (that +is, a function that returns $\true$ more often than any other). + +When discussing extreme solutions, I will now restrict my attention to boolean functions +(that is, with $Y$ being the type of booleans). Functor $\F$ is monotonic +if the calls to $f$ in $\F'(f)$ are in _positive positions_ (that is, under an even number +of negations). Indeed, from now on, I will restrict my attention to such monotonic +functors $\F$. + +Let me introduce a running example. Consider the following equation, +where $x$ ranges over the integers: + +~ Equation {#eq-EvenNat} + g(x) \Equal (x = 0 \Or g(x-2)) +~ + +This equation has four solutions in $g$. With $w$ ranging over the integers, they are: + +~ Equation + \begin{array}{r@{}l} + g(x) \Equiv{}& x \in \{w \;|\; 0 \leq w \And w\textrm{ even}\} \\ + g(x) \Equiv{}& x \in \{w \;|\; w\textrm{ even}\} \\ + g(x) \Equiv{}& x \in \{w \;|\; (0 \leq w \And w\textrm{ even}) \Or w\textrm{ odd}\} \\ + g(x) \Equiv{}& x \in \{w \;|\; \true\} + \end{array} +~ + +The first of these is the least solution and the last is the greatest solution. + +In the literature, the definition of an extreme predicate is often given as a set of +_inference rules_. 
To designate the least solution, a single line separating the +antecedent (on top) from conclusion (on bottom) is used: + +~ Equation {#g-ind-rule} + \frac{}{g(0)} + \qquad\qquad + \frac{g(x-2)}{g(x)} +~ + +Through repeated applications of such rules, one can show that the predicate holds for +a particular value. For example, the _derivation_, or _proof tree_, +to the left in Figure [#fig-proof-trees] shows that $g(6)$ holds. +(In this simple example, the derivation is a rather degenerate proof "tree".) +The use of these inference rules gives rise to a least solution, because proof trees are +accepted only if they are _finite_. + +~ Begin Figure { #fig-proof-trees caption="Left: a finite proof tree that uses the rules of [#g-ind-rule] to establish $g(6)$. Right: an infinite proof tree that uses the rules of [#g-coind-rule] to establish $g(1)$." } +~ Begin Columns +~~ Column { vertical-align=bottom } +~ Math +\dfrac{ + \dfrac{ + \dfrac{ + \dfrac{}{g(0)\xstrut} + }{g(2)\xstrut} + }{g(4)\xstrut} + }{g(6)\xupstrut} +~ +~~ +~~ Column { width=5em } + +~~ +~~ Column { vertical-align=bottom } +~ Math +\Dfrac{ + \Dfrac{ + \Dfrac{ + \Dfrac{ + {}_{\vdots } + }{{g(-5)}} + }{{g(-3)}} + }{{g(-1)}} + }{g(1)} +~ +~~ +~ End Columns +~ End Figure + +When inference rules are to designate the greatest solution, a double +line is used: + +~ Equation {#g-coind-rule} + \Dfrac{}{g(0)} + \qquad\qquad + \Dfrac{g(x-2)}{g(x)} +~ + +In this case, proof trees are allowed to be infinite. For example, the (partial depiction +of the) infinite proof tree on the right in Figure [#fig-proof-trees] shows that $g(1)$ holds. + +Note that derivations may not be unique. For example, in the case of the greatest +solution for $g$, there are two proof trees that establish $g(0)$: one is the finite +proof tree that uses the left-hand rule of [#g-coind-rule] once, the other is the infinite +proof tree that keeps on using the right-hand rule of [#g-coind-rule]. 
+ +### Working with Extreme Predicates + +In general, one cannot evaluate whether or not an extreme predicate holds for some +input, because doing so may take an infinite number of steps. For example, following +the recursive calls in the definition [#eq-EvenNat] to try to evaluate $g(7)$ would never +terminate. However, there are useful ways to establish that an extreme predicate holds +and there are ways to make use of one once it has been established. + +For any $\F$ as in [#eq-general], I define two infinite series of well-founded +functions, $\iter{f}_k$ and $\Iter{f}_k$ +where $k$ ranges over the natural numbers: + +~ Equation {#eq-least-approx} + \iter{f}_k(x) \Equal \left\{ + \begin{array}{ll} + \false & \textrm{if } k = 0 \\ + \F(\iter{f}_{k-1})(x) & \textrm{if } k > 0 + \end{array} + \right. +~ +~ Equation {#eq-greatest-approx} + \Iter{f}_k(x) \Equal \left\{ + \begin{array}{ll} + \true & \textrm{if } k = 0 \\ + \F(\Iter{f}_{k-1})(x) & \textrm{if } k > 0 + \end{array} + \right. +~ + +These functions are called the _iterates_ of $f$, and I will also refer to them +as the _prefix predicates_ of $f$ (or the _prefix predicate_ of $f$, if we think +of $k$ as being a parameter). +Alternatively, we can define $\iter{f}_k$ and $\Iter{f}_k$ without mentioning $x$: +Let $\bot$ denote the function that always returns $\false$, let $\top$ +denote the function that always returns $\true$, and let a superscript on $\F$ denote +exponentiation (for example, $\F^0(f) = f$ and $\F^2(f) = \F(\F(f))$). +Then, [#eq-least-approx] and [#eq-greatest-approx] can be stated equivalently as +$\iter{f}_k = \F^k(\bot)$ and $\Iter{f}_k = \F^k(\top)$. 
+ +For any solution $f$ to equation [#eq-general], we have, for any $k$ and $\ell$ +such that $k \leq \ell$: + +~ Equation {#eq-prefix-postfix} + \iter{f}_k \quad\FBelow\quad + \iter{f}_\ell \quad\FBelow\quad + f \quad\FBelow\quad + \Iter{f}_\ell \quad\FBelow\quad + \Iter{f}_k +~ + +In other words, every $\iter{f}_k$ is a _pre-fixpoint_ of $f$ and every $\Iter{f}_k$ is a _post-fixpoint_ +of $f$. Next, I define two functions, $f\least$ and $f\greatest$, in +terms of the prefix predicates: + +~ Equation {#eq-least-is-exists} + f\least(x) \Equal \exists k \bullet\; \iter{f}_k(x) +~ +~ Equation {#eq-greatest-is-forall} + f\greatest(x) \Equal \forall k \bullet\; \Iter{f}_k(x) +~ + +By [#eq-prefix-postfix], we also have that $f\least$ is a pre-fixpoint of $\F$ and $f\greatest$ +is a post-fixpoint of $\F$. The marvelous thing is that, if $\F$ is _continuous_, then +$f\least$ and $f\greatest$ are the least and greatest fixpoints of $\F$. +These equations let us do proofs by induction when dealing with extreme predicates. +I will explain in Section [#sec-friendliness] how to check for continuity. + +Let's consider two examples, both involving function $g$ in +[#eq-EvenNat]. As it turns out, $g$'s defining functor is continuous, +and therefore I will write $g\least$ and $g\greatest$ to denote the +least and greatest solutions for $g$ in [#eq-EvenNat]. + +#### Example with Least Solution {#sec-example-least-solution} + +The main technique for establishing that $g\least(x)$ holds for some +$x$, that is, proving something of the form $Q \Imp g\least(x)$, is to +construct a proof tree like the one for $g(6)$ in Figure +[#fig-proof-trees]. For a proof in this direction, since we're just +applying the defining equation, the fact that +we're using a least solution for $g$ never plays a role (as long as we +limit ourselves to finite derivations). 
+ +The technique for going in the other direction, proving something _from_ an established +$g\least$ property, that is, showing something of the form $g\least(x) \Imp R$, typically +uses induction on the structure of the proof tree. When the antecedent of our proof +obligation includes a predicate term $g\least(x)$, it is sound to +imagine that we have been given a proof tree for $g\least(x)$. Such a proof tree +would be a data structure---to be more precise, a term in an +_inductive datatype_. +For this reason, least solutions like $g\least$ have been given the +name _inductive predicate_. + +Let's prove $g\least(x) \Imp 0 \leq x \And x \textrm{ even}$. +We split our task into two cases, corresponding to which of the two +proof rules in [#g-ind-rule] was the +last one applied to establish $g\least(x)$. If it was the left-hand rule, then $x=0$, +which makes it easy to establish the conclusion of our proof goal. If it was the +right-hand rule, then we unfold the proof tree one level and obtain $g\least(x-2)$. +Since the proof tree for $g\least(x-2)$ is smaller than where we started, we invoke +the _induction hypothesis_ and obtain $0 \leq (x-2) \And (x-2) \textrm{ even}$, from which +it is easy to establish the conclusion of our proof goal. + +Here's how we do the proof formally using [#eq-least-is-exists]. We massage the +general form of our proof goal: + +|~~~|~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~| +| | $f\least(x) \Imp R$ | +| = |      { [#eq-least-is-exists] } | +| | $(\exists k \bullet\; \iter{f}_k(x)) \Imp R$ | +| = |      { distribute $\Imp$ over $\exists$ to the left } | +| | $\forall k \bullet\; (\iter{f}_k(x) \Imp R)$ | + +The last line can be proved by induction over $k$. So, in our case, we prove +$\iter{g}_k(x) \Imp 0 \leq x \And x \textrm{ even}$ for every $k$. +If $k=0$, then $\iter{g}_k(x)$ is $\false$, so our goal holds trivially. +If $k > 0$, then $\iter{g}_k(x) = (x = 0 \Or \iter{g}_{k-1}(x-2))$.
Our goal holds easily +for the first disjunct ($x=0$). For the other disjunct, +we apply the induction hypothesis (on the smaller $k-1$ and with $x-2$) and +obtain $0 \leq (x-2) \And (x-2) \textrm{ even}$, from which our proof goal +follows. + +#### Example with Greatest Solution {#sec-example-greatest-solution} + +We can think of a given predicate $g\greatest(x)$ as being represented +by a proof tree---in this case a term in a _coinductive datatype_, +since the proof may be infinite. +For this reason, greatest solutions like $g\greatest$ have +been given the name _coinductive predicate_, or _co-predicate_ for short. +The main technique for proving something from a given proof tree, that +is, to prove something of the form $g\greatest(x) \Imp R$, is to +destruct the proof. Since this is just unfolding the defining +equation, the fact that we're using a greatest solution for $g$ never +plays a role (as long as we limit ourselves to a finite number of +unfoldings). + +To go in the other direction, to establish a predicate defined as a greatest solution, +like $Q \Imp g\greatest(x)$, we may need an infinite number of steps. For this purpose, +we can use induction's dual, _coinduction_. Were it not for one little detail, coinduction +is as simple as continuations in programming: the next part of the proof obligation +is delegated to the _coinduction hypothesis_. The little detail is making sure that +it is the "next" part we're passing on for the continuation, not the same part. This +detail is called _productivity_ and corresponds to the requirement in +induction of making sure we're going down a well-founded relation when +applying the induction hypothesis. There are +many sources with more information, see for example the classic account by +Jacobs and Rutten [@JacobsRutten:IntroductionCoalgebra] +or a new attempt by Kozen and Silva +that aims to emphasize the simplicity, not the mystery, of +coinduction [@KozenSilva:Coinduction]. 
+ +Let's prove $\true \Imp g\greatest(x)$. The intuitive coinductive proof goes like this: +According to the right-hand rule of [#g-coind-rule], $g\greatest(x)$ follows if we +establish $g\greatest(x-2)$, and that's easy to do by invoking the coinduction hypothesis. +The "little detail", productivity, is satisfied in this proof because we applied +a rule in [#g-coind-rule] before invoking the coinduction hypothesis. + +For anyone who may have felt that the intuitive proof felt too easy, here is a formal +proof using [#eq-greatest-is-forall], which relies only on induction. We massage the +general form of our proof goal: + +|~~~|~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~| +| | $Q \Imp f\greatest(x)$ | +| = |      { [#eq-greatest-is-forall] } | +| | $Q \Imp \forall k \bullet\; \Iter{f}_k(x)$ | +| = |      { distribute $\Imp$ over $\forall$ to the right } | +| | $\forall k \bullet\; Q \Imp \Iter{f}_k(x)$ | + +The last line can be proved by induction over $k$. So, in our case, we prove +$\true \Imp \Iter{g}_k(x)$ for every $k$. +If $k=0$, then $\Iter{g}_k(x)$ is $\true$, so our goal holds trivially. +If $k > 0$, then $\Iter{g}_k(x) = (x = 0 \Or \Iter{g}_{k-1}(x-2))$. We establish the second +disjunct by applying the induction hypothesis (on the smaller $k-1$ and with $x-2$). + +### Other Techniques + +Although in this paper I consider only well-founded functions and extreme +predicates, it is worth mentioning that there are additional ways of making sure that +the set of solutions to [#eq-general] is nonempty. For example, if all calls to $f$ in +$\F'(f)$ are _tail-recursive calls_, then (under the assumption that $Y$ is nonempty) the set of +solutions is nonempty. To see this, consider an attempted evaluation of $f(x)$ that fails +to determine a definite result value because of an infinite chain of calls that applies $f$ +to each value of some subset $X'$ of $X$. 
Then, apparently, the value of $f$ for any one +of the values in $X'$ is not determined by the equation, but picking any particular result +values for these makes for a consistent definition. +This was pointed out by Manolios and Moore [@ManoliosMoore:PartialFunctions]. +Functions can be underspecified in this way in the proof assistants ACL2 [@ACL2:book] +and HOL [@Krauss:PhD]. + +## Functions in Dafny + +In this section, I explain with examples the support in +Dafny[^fn-on-da-web] for well-founded functions, extreme predicates, +and proofs regarding these. + +[^fn-on-da-web]: Dafny is open source at [dafny.codeplex.com](http://dafny.codeplex.com) and can also be used online at [rise4fun.com/dafny](http://rise4fun.com/dafny). + +### Well-founded Functions in Dafny + +Declarations of well-founded functions are unsurprising. For example, the Fibonacci +function is declared as follows: + +``` +function fib(n: nat): nat +{ + if n < 2 then n else fib(n-2) + fib(n-1) +} +``` + +Dafny verifies that the body (given as an expression in curly braces) is well defined. +This includes decrement checks for recursive (and mutually recursive) calls. Dafny +predefines a well-founded relation on each type and extends it to lexicographic tuples +of any (fixed) length. For example, the well-founded relation $x \Less y$ for integers +is $x < y \And 0 \leq y$, the one for reals is $x \leq y - 1.0 \And 0.0 \leq y$ +(this is the same ordering as for integers, if you read the integer +relation as $x \leq y - 1 \And 0 \leq y$), the one for inductive +datatypes is structural inclusion, +and the one for coinductive datatypes is $\false$. + +Using a `decreases` clause, the programmer can specify the term in this predefined +order. When a function definition omits a `decreases` clause, Dafny makes a simple +guess. This guess (which can be inspected by hovering over the function name in the +Dafny IDE) is very often correct, so users are rarely bothered to provide explicit +`decreases` clauses. 
+ +If a function returns `bool`, one can drop the result type `: bool` and change the +keyword `function` to `predicate`. + +### Proofs in Dafny + +Dafny has `lemma` declarations. These are really just special cases of methods: +they can have pre- and postcondition specifications and their body is a code block. +Here is the lemma we stated and proved in Section [#sec-fib-example]: + +``` +lemma FibProperty(n: nat) + ensures fib(n) % 2 == 0 <==> n % 3 == 0 +{ + if n < 2 { + } else { + FibProperty(n-2); FibProperty(n-1); + } +} +``` + +The postcondition of this lemma (keyword `ensures`) gives the proof +goal. As in any program-correctness logic (e.g., +[@Hoare:AxiomaticBasis]), the postcondition must +be established on every control path through the lemma's body. For +`FibProperty`, I give the proof by +an `if` statement, hence introducing a case split. The then branch is empty, because +Dafny can prove the postcondition automatically in this case. The else branch +performs two recursive calls to the lemma. These are the invocations of the induction +hypothesis and they follow the usual program-correctness rules, +namely: the precondition must hold at the call site, the call must terminate, and then +the caller gets to assume the postcondition upon return. The "proof glue" needed +to complete the proof is done automatically by Dafny. + +Dafny features an aggregate statement using which it is possible to make (possibly +infinitely) many calls at once. For example, the induction hypothesis can be called +at once on all values `n'` smaller than `n`: + +``` +forall n' | 0 <= n' < n { + FibProperty(n'); +} +``` + +For our purposes, this corresponds to _strong induction_. 
More +generally, the `forall` statement has the form + +``` +forall k | P(k) + ensures Q(k) +{ Statements; } +``` + +Logically, this statement corresponds to _universal introduction_: the body proves that +`Q(k)` holds for an arbitrary `k` such that `P(k)`, and the conclusion of the `forall` statement +is then $\forall k \bullet\; P(k) \Imp Q(k)$. When the body of the `forall` statement is +a single call (or `calc` statement), the `ensures` clause is inferred and can be omitted, +like in our `FibProperty` example. + +Lemma `FibProperty` is simple enough that its whole body can be replaced by the one +`forall` statement above. In fact, Dafny goes one step further: it automatically +inserts such a `forall` statement at the beginning of every lemma [@Leino:induction]. +Thus, `FibProperty` can be declared and proved simply by: + +``` {.para-end} +lemma FibProperty(n: nat) + ensures fib(n) % 2 == 0 <==> n % 3 == 0 +{ } +``` + +Going in the other direction from universal introduction is existential elimination, +also known as Skolemization. Dafny has a statement for this, too: +for any variable `x` and boolean expression `Q`, the +_assign such that_ statement `x :| Q;` says to assign to `x` a value such that `Q` +will hold. A proof obligation when using this statement is to show that there +exists an `x` such that `Q` holds. For example, if the fact +$\exists k \bullet\; 100 \leq \fib(k) < 200$ is known, then the statement +`k :| 100 <= fib(k) < 200;` will assign to `k` some value (chosen arbitrarily) +for which `fib(k)` falls in the given range. + +### Extreme Predicates in Dafny {#sec-friendliness} + +In this previous subsection, I explained that a `predicate` declaration introduces a +well-founded predicate. The declarations for introducing extreme predicates are +`inductive predicate` and `copredicate`. 
Here is the definition of the least and +greatest solutions of $g$ from above, let's call them `g` and `G`: + +``` +inductive predicate g(x: int) { x == 0 || g(x-2) } +copredicate G(x: int) { x == 0 || G(x-2) } +``` + +When Dafny receives either of these definitions, it automatically declares the corresponding +prefix predicates. Instead of the names $\iter{g}_k$ and $\Iter{g}_k$ that I used above, Dafny +names the prefix predicates `g#[k]` and `G#[k]`, respectively, that is, the name of +the extreme predicate appended with `#`, and the subscript is given as an argument in +square brackets. The definition of the prefix predicate derives from the body of +the extreme predicate and follows the form in [#eq-least-approx] and [#eq-greatest-approx]. +Using a faux-syntax for illustrative purposes, here are the prefix +predicates that Dafny defines automatically from the extreme +predicates `g` and `G`: + +``` +predicate g#[_k: nat](x: int) { _k != 0 && (x == 0 || g#[_k-1](x-2)) } +predicate G#[_k: nat](x: int) { _k != 0 ==> (x == 0 || G#[_k-1](x-2)) } +``` + +The Dafny verifier is aware of the connection between extreme predicates and their +prefix predicates, [#eq-least-is-exists] and [#eq-greatest-is-forall]. + +Remember that to be well defined, the defining functor of an extreme predicate +must be monotonic, and for [#eq-least-is-exists] and [#eq-greatest-is-forall] to hold, +the functor must be continuous. Dafny enforces the former of these by checking that +recursive calls of extreme predicates are in positive positions. The continuity +requirement comes down to checking that they are also in _continuous positions_: +that recursive calls to inductive predicates are +not inside unbounded universal quantifiers and that recursive calls to co-predicates +are not inside unbounded existential quantifiers [@Milner:CCS; @LeinoMoskal:Coinduction]. 
+ +### Proofs about Extreme Predicates + +From what I have presented so far, we can do the formal proofs from Sections +[#sec-example-least-solution] and [#sec-example-greatest-solution]. Here is the +former: + +``` +lemma EvenNat(x: int) + requires g(x) + ensures 0 <= x && x % 2 == 0 +{ + var k: nat :| g#[k](x); + EvenNatAux(k, x); +} +lemma EvenNatAux(k: nat, x: int) + requires g#[k](x) + ensures 0 <= x && x % 2 == 0 +{ + if x == 0 { } else { EvenNatAux(k-1, x-2); } +} +``` + +Lemma `EvenNat` states the property we wish to prove. From its +precondition (keyword `requires`) and +[#eq-least-is-exists], we know there is some `k` that will make the condition in the +assign-such-that statement true. Such a value is then assigned to `k` and passed to +the auxiliary lemma, which promises to establish the proof goal. Given the condition +`g#[k](x)`, the definition of `g#` lets us conclude `k != 0` as well as the disjunction +`x == 0 || g#[k-1](x-2)`. The then branch considers the case of the first disjunct, +from which the proof goal follows automatically. The else branch can then assume +`g#[k-1](x-2)` and calls the induction hypothesis with those parameters. The proof +glue that shows the proof goal for `x` to follow from the proof goal with `x-2` is +done automatically. + +Because Dafny automatically inserts the statement + +``` +forall k', x' | 0 <= k' < k && g#[k'](x') { + EvenNatAux(k', x'); +} +``` + +at the beginning of the body of `EvenNatAux`, the body can be left empty and Dafny +completes the proof automatically. + +Here is the Dafny program that gives the proof from Section [#sec-example-greatest-solution]: + +``` {.para-end} +lemma Always(x: int) + ensures G(x) +{ forall k: nat { AlwaysAux(k, x); } } +lemma AlwaysAux(k: nat, x: int) + ensures G#[k](x) +{ } +``` + +While each of these proofs involves only basic proof rules, the setup feels a bit clumsy, +even with the empty body of the auxiliary lemmas. 
Moreover,
+the proofs do not reflect the intuitive proofs I described in
+Section [#sec-example-least-solution] and [#sec-example-greatest-solution].
+These shortcomings are addressed in the next subsection.
+
+### Nicer Proofs of Extreme Predicates
+
+The proofs we just saw follow standard forms:
+use Skolemization to convert the inductive predicate into a prefix predicate for some `k`
+and then do the proof inductively over `k`; respectively,
+by induction over `k`, prove the prefix predicate for every `k`, then use
+universal introduction to convert to the coinductive predicate.
+With the declarations `inductive lemma` and `colemma`, Dafny offers to
+set up the proofs
+in these standard forms. What is gained is not just fewer characters in the program
+text, but also a possible intuitive reading of the proofs. (Okay, to be fair, the
+reading is intuitive for simpler proofs; complicated proofs may or may not be intuitive.)
+
+Somewhat analogous to the creation of prefix predicates from extreme predicates, Dafny
+automatically creates a _prefix lemma_ `L#` from each "extreme lemma" `L`. The pre-
+and postconditions of a prefix lemma are copied from those of the extreme lemma,
+except for the following replacements:
+For an inductive lemma, Dafny looks in the precondition to find calls (in positive, continuous
+positions) to inductive predicates `P(x)` and replaces these with `P#[_k](x)`.
+For a
+co-lemma, Dafny looks in the postcondition to find calls (in positive, continuous positions)
+to co-predicates `P` (including equality among coinductive datatypes, which is a built-in
+co-predicate) and replaces these with `P#[_k](x)`.
+In each case, these predicates `P` are the lemma's _focal predicates_.
+
+The body of the extreme lemma is moved to the prefix lemma, but with
+each recursive
+call `L(x)` replaced by `L#[_k-1](x)` and each occurrence of a call
+to a focal predicate
+`P(x)` replaced by `P#[_k-1](x)`.
The bodies of the extreme lemmas are then replaced as shown
+in the previous subsection. By construction, this new body correctly leads to the
+extreme lemma's postcondition.
+
+Let us see what effect these rewrites have on how one can write proofs. Here are the proofs
+of our running example:
+
+```
+inductive lemma EvenNat(x: int)
+  requires g(x)
+  ensures 0 <= x && x % 2 == 0
+{ if x == 0 { } else { EvenNat(x-2); } }
+colemma Always(x: int)
+  ensures G(x)
+{ Always(x-2); }
+```
+
+Both of these proofs follow the intuitive proofs given in Sections
+[#sec-example-least-solution] and [#sec-example-greatest-solution]. Note that in these
+simple examples, the user is never bothered with either prefix predicates or
+prefix lemmas---the proofs just look like "what you'd expect".
+
+Since Dafny automatically inserts calls to the induction hypothesis at the beginning of
+each lemma, the bodies of the given extreme lemmas `EvenNat` and
+`Always` can be empty and Dafny still completes the proofs.
+Folks, it doesn't get any simpler than that!
+
+# Class Types
+
+````
+ClassDecl = "class" { Attribute } ClassName [ GenericParameters ]
+  ["extends" Type {"," Type} ]
+  "{" { { DeclModifier } ClassMemberDecl(moduleLevelDecl: false) } "}"
+````
+
+````
+ClassMemberDecl(moduleLevelDecl) =
+  ( FieldDecl | FunctionDecl |
+    MethodDecl(isGhost: ("ghost" was present),
+               allowConstructor: !moduleLevelDecl)
+  )
+````
+The ``ClassMemberDecl`` parameter `moduleLevelDecl` will be true if
+the member declaration is at the top level or directly within a
+module declaration. It will be false for ``ClassMemberDecl``s
+that are part of a class or trait declaration. If `moduleLevelDecl` is
+false, ``FieldDecl``s are not allowed.
+
+A _class_ `C` is a reference type declared as follows:
+```
+class C<T> extends J1, ..., Jn
+{
+  \(_members_\)
+}
+```
+where the list of type parameters `T` is optional and so is
+"`extends J1, ..., Jn`", which says that the class extends traits `J1` ... `Jn`.
+The members of a class are _fields_, _functions_, and +_methods_. These are accessed or invoked by dereferencing a reference +to a `C` instance. + +A function or method is invoked on an _instance_ +of `C`, unless the function or method is declared `static`. +A function or method that is not `static` is called an +_instance_ function or method. + +An instance function or method takes an implicit _receiver_ +parameter, namely, the instance used to access the member. In the +specification and body of an instance function or method, the receiver +parameter can be referred to explicitly by the keyword `this`. +However, in such places, members of `this` can also be mentioned +without any qualification. To illustrate, the qualified `this.f` and +the unqualified `f` refer to the same field of the same object in the +following example: +``` +class C { + var f: int + method Example() returns (b: bool) + { + b := f == this.f; + } +} +``` +so the method body always assigns `true` to the out-parameter `b`. +There is no semantic difference between qualified and +unqualified accesses to the same receiver and member. + +A `C` instance is created using `new`, for example: +``` +c := new C; +``` + +Note that `new` simply allocates a `C` object and returns a reference +to it; the initial values of its fields are arbitrary values of their +respective types. Therefore, it is common to invoke a method, known +as an _initialization method_, immediately after creation, for +example: +``` +c := new C; +c.InitFromList(xs, 3); +``` +When an initialization method has no out-parameters and modifies no +more than `this`, then the two statements above can be combined into +one: +``` +c := new C.InitFromList(xs, 3); +``` +Note that a class can contain several initialization methods, that +these methods can be invoked at any time, not just as part of a `new`, +and that `new` does not require that an initialization method be +invoked at creation. 
+
+A class can declare special initializing methods called _constructor methods_.
+See Section [#sec-method-declarations].
+
+## Field Declarations
+````
+FieldDecl = "var" { Attribute } FIdentType { "," FIdentType }
+````
+An ``FIdentType`` is used to declare a field. The field name is either an
+identifier (that is not allowed to start with a leading underscore) or
+some digits. Digits are used if you want to number your fields, e.g. "0",
+"1", etc.
+````
+FIdentType = ( FieldIdent | digits ) ":" Type
+````
+
+A field `x` of some type `T` is declared as:
+```
+var x: T
+```
+
+A field declaration declares one or more fields of the enclosing class.
+Each field is a named part of the state of an object of that class. A
+field declaration is similar to but distinct from a variable declaration
+statement. Unlike for local variables and bound variables, the type is
+required and will not be inferred.
+
+Unlike method and function declarations, a field declaration
+cannot be given at the top level. Fields can be declared in either a
+class or a trait. A class that inherits from multiple traits will
+have all the fields declared in any of its parent traits.
+
+Fields that are declared as `ghost` can only be used in specifications,
+not in code that will be compiled into executable code.
+
+Fields may not be declared `static`.
+
+`protected` is not allowed for fields.
+
+## Method Declarations
+````
+MethodDecl(isGhost, allowConstructor) =
+  MethodKeyword { Attribute } [ MethodName ]
+  ( MethodSignature(isGhost) | SignatureEllipsis_ )
+  MethodSpec [ BlockStmt ]
+````
+The `isGhost` parameter is true iff the `ghost` keyword
+preceded the method declaration.
+
+If the `allowConstructor` parameter is false then
+the ``MethodDecl`` must not be a `constructor`
+declaration.
+
+````
+MethodKeyword = ("method" | "lemma" | "colemma"
+                | "inductive" "lemma" | "constructor" )
+````
+The method keyword is used to specify special kinds of methods
+as explained below.
+
+````
+MethodSignature(isGhost) =
+  [ GenericParameters ]
+  Formals(allowGhost: !isGhost)
+  [ "returns" Formals(allowGhost: !isGhost) ]
+````
+A method signature specifies the method generic parameters,
+input parameters and return parameters.
+The formal parameters are not allowed to have `ghost` specified
+if `ghost` was already specified for the method.
+
+````
+SignatureEllipsis_ = "..."
+````
+A ``SignatureEllipsis_`` is used when a method or function is being redeclared
+in a module that refines another module. In that case the signature is
+copied from the module that is being refined. This works because
+Dafny does not support method or function overloading, so the
+name of the class method uniquely identifies it without the
+signature.
+
+````
+Formals(allowGhostKeyword) =
+  "(" [ GIdentType(allowGhostKeyword)
+      { "," GIdentType(allowGhostKeyword) } ] ")"
+````
+The ``Formals`` specifies the names and types of the method input or
+output parameters.
+
+See section [#sec-method-specification] for a description of ``MethodSpec``.
+
+A method declaration adheres to the ``MethodDecl`` grammar above.
+Here is an example of a method declaration.
+
+```
+method {:att1}{:att2} M<T1, T2>(a: A, b: B, c: C) returns (x: X, y: Y, z: Z)
+  requires Pre
+  modifies Frame
+  ensures Post
+  decreases Rank
+{
+  Body
+}
+```
+
+where `:att1` and `:att2` are attributes of the method,
+`T1` and `T2` are type parameters of the method (if generic),
+`a, b, c` are the method’s in-parameters, `x, y, z` are the
+method’s out-parameters, `Pre` is a boolean expression denoting the
+method’s precondition, `Frame` denotes a set of objects whose fields may
+be updated by the method, `Post` is a boolean expression denoting the
+method’s postcondition, `Rank` is the method’s variant function, and
+`Body` is a statement that implements the method.
`Frame` can be a list
+of expressions, each of which is a set of objects or a single object, the
+latter standing for the singleton set consisting of that one object. The
+method’s frame is the union of these sets, plus the set of objects
+allocated by the method body. For example, if `c` and `d` are parameters
+of a class type `C`, then
+
+```
+modifies {c, d}
+
+modifies {c} + {d}
+
+modifies c, {d}
+
+modifies c, d
+```
+
+all mean the same thing.
+
+A method can be declared as ghost by preceding the declaration with the
+keyword `ghost`. By default, a method has an implicit receiver parameter,
+`this`. This parameter can be removed by preceding the method declaration
+with the keyword `static`. A static method `M` in a class `C` can be invoked by
+`C.M(…)`.
+
+In a class, a method can be declared to be a constructor method by
+replacing the keyword `method` with the keyword `constructor`. A constructor
+can only be called at the time an object is allocated (see
+object-creation examples below), and for a class that contains one or
+more constructors, object creation must be done in conjunction with a
+call to a constructor.
+
+An ordinary method is declared with the `method` keyword.
+Section [#sec-constructors] explains methods that instead use the
+`constructor` keyword. Section [#sec-lemmas] discusses methods that are
+declared with the `lemma` keyword. Methods declared with the `inductive`
+`lemma` keywords are discussed later in the context of inductive
+predicates (see [#sec-inductive-datatypes]). Methods declared with the
+`colemma` keyword are discussed later in the context of co-inductive
+types, in section [#sec-colemmas].
+
+A method without a body is _abstract_. A method is allowed to be
+abstract under the following circumstances:
+
+* It contains an `{:axiom}` attribute
+* It contains an `{:imported}` attribute
+* It contains a `{:decl}` attribute
+* It is a declaration in an abstract module.
+Note that when there is no body, Dafny assumes that the *ensures* +clauses are true without proof. + +### Constructors +To write structured object-oriented programs, one often relies on that +objects are constructed only in certain ways. For this purpose, Dafny +provides _constructor (method)s_, which are a restricted form of +initialization methods. A constructor is declared with the keyword +`constructor` instead of `method`. When a class contains a +constructor, every call to `new` for that class must be accompanied +with a call to one of the constructors. Moreover, a constructor +cannot be called at other times, only during object creation. Other +than these restrictions, there is no semantic difference between using +ordinary initialization methods and using constructors. + +The Dafny design allows the constructors to be named, which promotes +using names like `InitFromList` above. Still, many classes have just +one constructor or have a typical constructor. Therefore, Dafny +allows one _anonymous constructor_, that is, a constructor whose name +is essentially "". For example: +``` +class Item { + constructor (x: int, y: int) + // ... +} +``` +When invoking this constructor, the "`.`" is dropped, as in: +``` +m := new Item(45, 29); +``` +Note that an anonymous constructor is just one way to name a +constructor; there can be other constructors as well. + +### Lemmas +Sometimes there are steps of logic required to prove a program correct, +but they are too complex for Dafny to discover and use on its own. When +this happens, we can often give Dafny assistance by providing a lemma. +This is done by declaring a method with the `lemma` keyword. +Lemmas are implicitly ghost methods and the `ghost` keyword cannot +be applied to them. + +For an example, see the `FibProperty` lemma in +Section [#sec-proofs-in-dafny]. + +See [the Dafny Lemmas tutorial](http://rise4fun.com/Dafny/tutorial/Lemmas) +for more examples and hints for using lemmas. 
+ +## Function Declarations + +```` +FunctionDecl = + ( "function" [ "method" ] { Attribute } + FunctionName + FunctionSignatureOrEllipsis_(allowGhostKeyword: ("method" present)) + | "predicate" [ "method" ] { Attribute } + PredicateName + PredicateSignatureOrEllipsis_(allowGhostKeyword: ("method" present)) + | "inductive" "predicate" { Attribute } + PredicateName + PredicateSignatureOrEllipsis_(allowGhostKeyword: false) + | "copredicate" { Attribute } + CopredicateName + PredicateSignatureOrEllipsis_(allowGhostKeyword: false) + ) + FunctionSpec [ FunctionBody ] + +FunctionSignatureOrEllipsis_(allowGhostKeyword) = + FunctionSignature_ | SignatureEllipsis_ +FunctionSignature_(allowGhostKeyword) = + [ GenericParameters ] Formals(allowGhostKeyword) ":" Type + +PredicateSignatureOrEllipsis_(allowGhostKeyword) = + PredicateSignature_(allowGhostKeyword) | SignatureEllipsis_ +PredicateSignature_(allowGhostKeyword) = + [ GenericParameters ] Formals(allowGhostKeyword) + +FunctionBody = "{" Expression(allowLemma: true, allowLambda: true) "}" +```` +In the above productions, allowGhostKeyword is true if the optional +"method" keyword was specified. This allows some of the +formal parameters of a function method to be specified as ghost. + +See section [#sec-function-specification] for a description of ``FunctionSpec``. + +A Dafny function is a pure mathematical function. It is allowed to +read memory that was specified in its `reads` expression but is not +allowed to have any side effects. 
+
+Here is an example function declaration:
+```
+function {:att1}{:att2} F<T1, T2>(a: A, b: B, c: C): T
+  requires Pre
+  reads Frame
+  ensures Post
+  decreases Rank
+{
+  Body
+}
+```
+
+where `:att1` and `:att2` are attributes of the function, if any, `T1`
+and `T2` are type parameters of the function (if generic), `a, b, c` are
+the function’s parameters, `T` is the type of the function’s result,
+`Pre` is a boolean expression denoting the function’s precondition,
+`Frame` denotes a set of objects whose fields the function body may
+depend on, `Post` is a boolean expression denoting the function’s
+postcondition, `Rank` is the function’s variant function, and `Body` is
+an expression that defines the function return value. The precondition
+allows a function to be partial, that is, the precondition says when the
+function is defined (and Dafny will verify that every use of the function
+meets the precondition). The postcondition is usually not needed, since
+the body of the function gives the full definition. However, the
+postcondition can be a convenient place to declare properties of the
+function that may require an inductive proof to establish. For example:
+
+````
+function Factorial(n: int): int
+  requires 0 <= n
+  ensures 1 <= Factorial(n)
+{
+  if n == 0 then 1 else Factorial(n-1) * n
+}
+````
+
+says that the result of Factorial is always positive, which Dafny
+verifies inductively from the function body. To refer to the function’s
+result in the postcondition, use the function itself, as shown in the
+example.
+
+By default, a function is *ghost*, and cannot be called from non-ghost
+code. To make it non-ghost, replace the keyword `function` with the two
+keywords "function method".
+
+By default, a function has an implicit receiver parameter, `this`. This
+parameter can be removed by preceding the function declaration with the
+keyword `static`. A static function `F` in a class `C` can be invoked
+by `C.F(…)`.
This can give a convenient way to declare a number of helper
+functions in a separate class.
+
+As for methods, a ``SignatureEllipsis_`` is used when declaring
+a function in a module refinement. For example, if module `M0` declares
+function `F`, a module `M1` can be declared to refine `M0` and
+`M1` can then refine `F`. The refinement function, `M1.F`, can have
+a ``SignatureEllipsis_`` which means to copy the signature from
+`M0.F`. A refinement function can furnish a body for a function
+(if `M0.F` does not provide one). It can also add **ensures**
+clauses. And if `F` is a predicate, it can add conjuncts to
+a previously given body.
+
+### Function Transparency
+A function is said to be _transparent_ in a location if the
+contents of the body of the function is visible at that point.
+A function is said to be _opaque_ at a location if it is not
+transparent. However, the ``FunctionSpec`` of a function
+is always available.
+
+A function is usually transparent up to some unrolling level (up to
+1, or maybe 2 or 3). If its arguments are all literals it is
+transparent all the way.
+
+But the transparency of a function is affected by the following:
+
+* whether the function was declared to be protected, and
+* whether the function was given the `{:opaque}` attribute (as explained
+in Section [#sec-opaque]).
+
+The following table summarizes where the function is transparent.
+The module referenced in the table is the module in which the
+function is defined.
+
++------------+--------------+-------------+-------------+
+| Protected? | `{:opaque}`? 
| Transparent | Transparent |
+|            |              | Inside      | Outside     |
+|            |              | Module      | Module      |
++:----------:+:------------:+:-----------:+:-----------:+
+| N          | N            | Y           | Y           |
+| Y          | N            | Y           | N           |
+| N          | Y            | N           | N           |
++------------+--------------+-------------+-------------+
+
+When `{:opaque}` is specified for function `g`, `g` is opaque,
+however the lemma `reveal_g` is available to give the semantics
+of `g` whether in the defining module or outside.
+
+It currently is not allowed to have both `protected` and
+`{:opaque}` specified for a function.
+
+### Predicates
+A function that returns a `bool` result is called a _predicate_. As an
+alternative syntax, a predicate can be declared by replacing the `function`
+keyword with the `predicate` keyword and omitting a declaration of the
+return type.
+
+### Inductive Predicates and Lemmas
+See section [#sec-friendliness] for descriptions
+of inductive predicates and lemmas.
+
+# Trait Types
+````
+TraitDecl = "trait" { Attribute } TraitName [ GenericParameters ]
+  "{" { { DeclModifier } ClassMemberDecl(moduleLevelDecl: false) } "}"
+````
+
+A _trait_ is an "abstract superclass", or call it an "interface" or
+"mixin". Traits are new to Dafny and are likely to evolve for a
+while.
+
+The declaration of a trait is much like that of a class:
+```
+trait J
+{
+  \(_members_\)
+}
+```
+where `\(_members_\)` can include fields, functions, and methods, but
+no constructor methods. The functions and methods are allowed to be
+declared `static`.
+
+A reference type `C` that extends a trait `J` is assignable to `J`, but
+not the other way around. The members of `J` are available as members
+of `C`. A member in `J` is not allowed to be redeclared in `C`,
+except if the member is a non-`static` function or method without a
+body in `J`. By doing so, type `C` can supply a stronger
+specification and a body for the member.
+
+`new` is not allowed to be used with traits. Therefore, there is no
+object whose allocated type is a trait.
But there can of course be
+objects of a class `C` that implements a trait `J`, and a reference to
+such a `C` object can be used as a value of type `J`.
+
+As an example, the following trait represents movable geometric shapes:
+```
+trait Shape
+{
+  function method Width(): real
+    reads this
+  method Move(dx: real, dy: real)
+    modifies this
+  method MoveH(dx: real)
+    modifies this
+  {
+    Move(dx, 0.0);
+  }
+}
+```
+Members `Width` and `Move` are _abstract_ (that is, bodyless) and can
+be implemented differently by different classes that extend the trait.
+The implementation of method `MoveH` is given in the trait and thus
+gets used by all classes that extend `Shape`. Here are two classes
+that each extends `Shape`:
+```
+class UnitSquare extends Shape
+{
+  var x: real, y: real
+  function method Width(): real { // note the empty reads clause
+    1.0
+  }
+  method Move(dx: real, dy: real)
+    modifies this
+  {
+    x, y := x + dx, y + dy;
+  }
+}
+class LowerRightTriangle extends Shape
+{
+  var xNW: real, yNW: real, xSE: real, ySE: real
+  function method Width(): real
+    reads this
+  {
+    xSE - xNW
+  }
+  method Move(dx: real, dy: real)
+    modifies this
+  {
+    xNW, yNW, xSE, ySE := xNW + dx, yNW + dy, xSE + dx, ySE + dy;
+  }
+}
+```
+Note that the classes can declare additional members, that they supply
+implementations for the abstract members of the trait,
+that they repeat the member signatures, and that they are responsible
+for providing their own member specifications that both strengthen the
+corresponding specification in the trait and are satisfied by the
+provided body.
+
+Finally, here is some code that creates two class instances and uses
+them together as shapes:
+```
+var myShapes: seq<Shape>;
+var A := new UnitSquare;
+myShapes := [A];
+var tri := new LowerRightTriangle;
+// myShapes contains two Shape values, of different classes
+myShapes := myShapes + [tri];
+// move shape 1 to the right by the width of shape 0
+myShapes[1].MoveH(myShapes[0].Width());
+```
+
+# Array Types
+````
+ArrayType_ = arrayToken [ GenericInstantiation ]
+````
+
+Dafny supports mutable fixed-length _array types_ of any positive
+dimension. Array types are reference types.
+
+## One-dimensional arrays
+
+A one-dimensional array of `n` `T` elements is created as follows:
+```
+a := new T[n];
+```
+The initial values of the array elements are arbitrary values of type
+`T`.
+The length of an array is retrieved using the immutable `Length`
+member. For example, the array allocated above satisfies:
+```
+a.Length == n
+```
+
+For any integer-based numeric `i` in the range `0 <= i < a.Length`,
+the _array selection_ expression `a[i]` retrieves element `i` (that
+is, the element preceded by `i` elements in the array). The
+element stored at `i` can be changed to a value `t` using the array
+update statement:
+```
+a[i] := t;
+```
+
+Caveat: The type of the array created by `new T[n]` is
+`array<T>`. A mistake that is simple to make and that can lead to
+befuddlement is to write `array<T>` instead of `T` after `new`.
+For example, consider the following:
+```
+var a := new array<T>;
+var b := new array<T>[n];
+var c := new array<T>(n); // resolution error
+var d := new array(n); // resolution error
+```
+The first statement allocates an array of type `array<T>`, but of
+unknown length. The second allocates an array of type
+`array<array<T>>` of length `n`, that is, an array that holds `n`
+values of type `array<T>`. The third statement allocates an
+array of type `array<T>` and then attempts to invoke an anonymous
+constructor on this array, passing argument `n`.
Since `array` has no +constructors, let alone an anonymous constructor, this statement +gives rise to an error. If the type-parameter list is omitted for a +type that expects type parameters, Dafny will attempt to fill these +in, so as long as the `array` type parameter can be inferred, it is +okay to leave off the "``" in the fourth statement above. However, +as with the third statement, `array` has no anonymous constructor, so +an error message is generated. + +One-dimensional arrays support operations that convert a stretch of +consecutive elements into a sequence. For any array `a` of type +`array`, integer-based numerics `lo` and `hi` satisfying +`0 <= lo <= hi <= a.Length`, the following operations each yields a +`seq`: + ++---------------------+------------------------------------+ +| expression | description | ++---------------------+------------------------------------+ +| `a[lo..hi]` | subarray conversion to sequence | +| `a[lo..]` | drop | +| `a[..hi]` | take | +| `a[..]` | array conversion to sequence | ++---------------------+------------------------------------+ + +The expression `a[lo..hi]` takes the first `hi` elements of the array, +then drops the first `lo` elements thereof and returns what remains as +a sequence. The resulting sequence thus has length `hi - lo`. +The other operations are special instances of the first. If `lo` is +omitted, it defaults to `0` and if `hi` is omitted, it defaults to +`a.Length`. +In the last operation, both `lo` and `hi` have been omitted, thus +`a[..]` returns the sequence consisting of all the array elements of +`a`. + +The subarray operations are especially useful in specifications. For +example, the loop invariant of a binary search algorithm that uses +variables `lo` and `hi` to delimit the subarray where the search `key` +may be still found can be expressed as follows: +``` +key !in a[..lo] && key !in a[hi..] 
+``` +Another use is to say that a certain range of array elements have not +been changed since the beginning of a method: +``` +a[lo..hi] == old(a[lo..hi]) +``` +or since the beginning of a loop: +``` +ghost var prevElements := a[..]; +while // ... + invariant a[lo..hi] == prevElements[lo..hi] +{ + // ... +} +``` +Note that the type of `prevElements` in this example is `seq`, if +`a` has type `array`. + +A final example of the subarray operation lies in expressing that an +array's elements are a permutation of the array's elements at the +beginning of a method, as would be done in most sorting algorithms. +Here, the subarray operation is combined with the sequence-to-multiset +conversion: +``` +multiset(a[..]) == multiset(old(a[..])) +``` + +## Multi-dimensional arrays + +An array of 2 or more dimensions is mostly like a one-dimensional +array, except that `new` takes more length arguments (one for each +dimension), and the array selection expression and the array update +statement take more indices. For example: +``` +matrix := new T[m, n]; +matrix[i, j], matrix[x, y] := matrix[x, y], matrix[i, j]; +``` +create a 2-dimensional array whose dimensions have lengths `m` and +`n`, respectively, and then swaps the elements at `i,j` and `x,y`. +The type of `matrix` is `array2`, and similarly for +higher-dimensional arrays (`array3`, `array4`, etc.). Note, +however, that there is no type `array0`, and what could have been +`array1` is actually named just `array`. + +The `new` operation above requires `m` and `n` to be non-negative +integer-based numerics. These lengths can be retrieved using the +immutable fields `Length0` and `Length1`. For example, the following +holds of the array created above: +``` +matrix.Length0 == m && matrix.Length1 == n +``` +Higher-dimensional arrays are similar (`Length0`, `Length1`, +`Length2`, ...). The array selection expression and array update +statement require that the indices are in bounds. 
For example, the +swap statement above is well-formed only if: +``` +0 <= i < matrix.Length0 && 0 <= j < matrix.Length1 && +0 <= x < matrix.Length0 && 0 <= y < matrix.Length1 +``` + +In contrast to one-dimensional arrays, there is no operation to +convert stretches of elements from a multi-dimensional array to a +sequence. + +# Type object +```` +ObjectType_ = "object" +```` + +There is a built-in trait `object` that is like a supertype of all +reference types.[^fn-object-trait] Every class automatically extends +object and so does every user-defined trait. The purpose of type `object` +is to enable a uniform treatment of _dynamic frames_. In particular, it +is useful to keep a ghost field (typically named `Repr` for +"representation") of type `set`. + +[^fn-object-trait]: The current compiler restriction that `object` cannot + be used as a type parameter needs to be removed. + +# Iterator types +```` +IteratorDecl = "iterator" { Attribute } IteratorName + ( [ GenericParameters ] + Formals(allowGhostKeyword: true) + [ "yields" Formals(allowGhostKeyword: true) ] + | "..." + ) + IteratorSpec [ BlockStmt ] +```` + +See section [#sec-iterator-specification] for a description of ``IteratorSpec``. + +An _iterator_ provides a programming abstraction for writing code that +iteratively returns elements. These CLU-style iterators are +_co-routines_ in the sense that they keep track of their own program +counter and control can be transferred into and out of the iterator +body. + +An iterator is declared as follows: +``` +iterator Iter(\(_in-params_\)) yields (\(_yield-params_\)) + \(_specification_\) +{ + \(_body_\) +} +``` +where `T` is a list of type parameters (as usual, if there are no type +parameters, "``" is omitted). This declaration gives rise to a +reference type with the same name, `Iter`. In the signature, +in-parameters and yield-parameters are the iterator's analog of a +method's in-parameters and out-parameters. 
The difference is that the +out-parameters of a method are returned to a caller just once, whereas +the yield-parameters of an iterator are returned each time the iterator +body performs a `yield`. The body consists of statements, like in a +method body, but with the availability also of `yield` statements. + +From the perspective of an iterator client, the `iterator` declaration +can be understood as generating a class `Iter` with various +members, a simplified version of which is described next. + +The `Iter` class contains an anonymous constructor whose parameters +are the iterator's in-parameters: +``` +predicate Valid() +constructor (\(_in-params_\)) + modifies this + ensures Valid() +``` +An iterator is created using `new` and this anonymous constructor. +For example, an iterator willing to return ten consecutive integers +from `start` can be declared as follows: +``` +iterator Gen(start: int) yields (x: int) +{ + var i := 0; + while i < 10 { + x := start + i; + yield; + i := i + 1; + } +} +``` +An instance of this iterator is created using: +``` +iter := new Gen(30); +``` + +The predicate `Valid()` says when the iterator is in a state where one +can attempt to compute more elements. It is a postcondition of the +constructor and occurs in the specification of the `MoveNext` member: +``` +method MoveNext() returns (more: bool) + requires Valid() + modifies this + ensures more ==> Valid() +``` +Note that the iterator remains valid as long as `MoveNext` returns +`true`. Once `MoveNext` returns `false`, the `MoveNext` method can no +longer be called. Note, the client is under no obligation to keep +calling `MoveNext` until it returns `false`, and the body of the +iterator is allowed to keep returning elements forever. + +The in-parameters of the iterator are stored in immutable fields of +the iterator class. 
To illustrate in terms of the example above, the +iterator class `Gen` contains the following field: +``` +var start: int +``` +The yield-parameters also result in members of the iterator class: +``` +var x: int +``` +These fields are set by the `MoveNext` method. If `MoveNext` returns +`true`, the latest yield values are available in these fields and the +client can read them from there. + +To aid in writing specifications, the iterator class also contains +ghost members that keep the history of values returned by +`MoveNext`. The names of these ghost fields follow the names of the +yield-parameters with an "`s`" appended to the name (to suggest +plural). Name checking rules make sure these names do not give rise +to ambiguities. The iterator class for `Gen` above thus contains: +``` +ghost var xs: seq +``` +These history fields are changed automatically by `MoveNext`, but are +not assignable by user code. + +Finally, the iterator class contains some special fields for use in +specifications. In particular, the iterator specification gets +recorded in the following immutable fields: +``` +ghost var _reads: set +ghost var _modifies: set +ghost var _decreases0: T0 +ghost var _decreases1: T1 +// ... +``` +where there is a `_decreases\(_i_\): T\(_i_\)` field for each +component of the iterator's `decreases` +clause.[^fn-iterator-field-names] +In addition, there is a field: +``` +ghost var _new: set; +``` +to which any objects allocated on behalf of the iterator body get +added. The iterator body is allowed to remove elements from the +`_new` set, but cannot by assignment to `_new` add any elements. + +[^fn-iterator-field-names]: It would make sense to rename the special + fields `_reads` and `_modifies` to have the same names as the + corresponding keywords, `reads` and `modifies`, as is done for + function values. Also, the various `_decreases\(_i_\)` fields can + combined into one field named `decreases` whose type is a + _n_-tuple. 
Thse changes may be incorporated into a future version + of Dafny. + +Note, in the precondition of the iterator, which is to hold upon +construction of the iterator, the in-parameters are indeed +in-parameters, not fields of `this`. + +It's regrettably tricky to use iterators. The language really +ought to have a `foreach` statement to make this easier. +Here is an example showing definition and use of an iterator. + +``` +iterator Iter(s: set) yields (x: T) + yield ensures x in s && x !in xs[..|xs|-1]; + ensures s == set z | z in xs; +{ + var r := s; + while (r != {}) + invariant forall z :: z in xs ==> x !in r; // r and xs are disjoint + invariant s == r + set z | z in xs; + { + var y :| y in r; + r, x := r - {y}, y; + yield; + assert y == xs[|xs|-1]; // needed as a lemma to prove loop invariant + } +} + +method UseIterToCopy(s: set) returns (t: set) + ensures s == t; +{ + t := {}; + var m := new Iter(s); + while (true) + invariant m.Valid() && fresh(m._new); + invariant t == set z | z in m.xs; + decreases s - t; + { + var more := m.MoveNext(); + if (!more) { break; } + t := t + {m.x}; + } +} +``` + + + +# Function types + +```` +Type = DomainType "->" Type +```` + +Functions are first-class values in Dafny. Function types have the form +`(T) -> U` where `T` is a comma-delimited list of types and `U` is a +type. `T` is called the function's _domain type(s)_ and `U` is its +_range type_. For example, the type of a function +``` +function F(x: int, b: bool): real +``` +is `(int, bool) -> real`. Parameters are not allowed to be ghost. + +To simplify the appearance of the basic case where a function's +domain consist of a list of exactly one type, the parentheses around +the domain type can be dropped in this case, as in `T -> U`. +This innocent simplification requires additional explanation in the +case where that one type is a tuple type, since tuple types are also +written with enclosing parentheses. 
+If the function takes a single argument that is a tuple, an additional +set of parentheses is needed. For example, the function +``` +function G(pair: (int, bool)): real +``` +has type `((int, bool)) -> real`. Note the necessary double +parentheses. Similarly, a function that takes no arguments is +different from one that takes a 0-tuple as an argument. For instance, +the functions +``` +function NoArgs(): real +function Z(unit: ()): real +``` +have types `() -> real` and `(()) -> real`, respectively. + +The function arrow, `->`, is right associative, so `A -> B -> C` means +`A -> (B -> C)`. The other association requires explicit parentheses: +`(A -> B) -> C`. + +Note that the receiver parameter of a named function is not part of +the type. Rather, it is used when looking up the function and can +then be thought of as being captured into the function definition. +For example, suppose function `F` above is declared in a class `C` and +that `c` references an object of type `C`; then, the following is type +correct: +``` +var f: (int, bool) -> real := c.F; +``` +whereas it would have been incorrect to have written something like: +``` +var f': (C, int, bool) -> real := F; // not correct +``` + +Outside its type signature, each function value has three properties, +described next. + +Every function implicitly takes the heap as an argument. No function +ever depends on the _entire_ heap, however. A property of the +function is its declared upper bound on the set of heap locations it +depends on for a given input. This lets the verifier figure out that +certain heap modifications have no effect on the value returned by a +certain function. For a function `f: T -> U` and a value `t` of type +`T`, the dependency set is denoted `f.reads(t)` and has type +`set`. + +The second property of functions stems from the fact that every function +is potentially _partial_. In other words, a property of a function is its +_precondition_. 
For a function `f: T -> U`, the precondition of `f` for a +parameter value `t` of type `T` is denoted `f.requires(t)` and has type +`bool`. + +The third property of a function is more obvious---the function's +body. For a function `f: T -> U`, the value that the function yields +for an input `t` of type `T` is denoted `f(t)` and has type `U`. + +Note that `f.reads` and `f.requires` are themselves functions. +Suppose `f` has type `T -> U` and `t` has type `T`. Then, `f.reads` +is a function of type `T -> set` whose `reads` and `requires` +properties are: +``` +f.reads.reads(t) == f.reads(t) +f.reads.requires(t) == true +``` +`f.requires` is a function of type `T -> bool` whose `reads` and +`requires` properties are: +``` +f.requires.reads(t) == f.reads(t) +f.requires.requires(t) == true +``` + +Dafny also support anonymous functions by means of +_lambda expressions_. See section [#sec-lambda-expressions]. + +# Algebraic Datatypes + +Dafny offers two kinds of algebraic datatypes, those defined +inductively and those defined co-inductively. The salient property of +every datatype is that each value of the type uniquely identifies one +of the datatype's constructors and each constructor is injective in +its parameters. + +```` +DatatypeDecl = ( InductiveDatatypeDecl | CoinductiveDatatypeDecl ) +```` + +## Inductive datatypes + +```` +InductiveDatatypeDecl_ = "datatype" { Attribute } DatatypeName [ GenericParameters ] + "=" DatatypeMemberDecl { "|" DatatypeMemberDecl } [ ";" ] +DatatypeMemberDecl = { Attribute } DatatypeMemberName [ FormalsOptionalIds ] +```` + +The values of inductive datatypes can be seen as finite trees where +the leaves are values of basic types, numeric types, reference types, +co-inductive datatypes, or function types. Indeed, values of +inductive datatypes can be compared using Dafny's well-founded +[<]{.monospace} ordering. 
+ +An inductive datatype is declared as follows: +``` +datatype D = \(_Ctors_\) +``` +where `\(_Ctors_\)` is a nonempty `|`-separated list of +_(datatype) constructors_ for the datatype. Each constructor has the +form: +``` +C(\(_params_\)) +``` +where `\(_params_\)` is a comma-delimited list of types, optionally +preceded by a name for the parameter and a colon, and optionally +preceded by the keyword `ghost`. If a constructor has no parameters, +the parentheses after the constructor name can be omitted. If no +constructor takes a parameter, the type is usually called an +_enumeration_; for example: +``` +datatype Friends = Agnes | Agatha | Jermaine | Jack +``` + +For every constructor `C`, Dafny defines a _discriminator_ `C?`, which +is a member that returns `true` if and only if the datatype value has +been constructed using `C`. For every named parameter `p` of a +constructor `C`, Dafny defines a _destructor_ `p`, which is a member +that returns the `p` parameter from the `C` call used to construct the +datatype value; its use requires that `C?` holds. For example, for +the standard `List` type +``` +datatype List = Nil | Cons(head: T, tail: List) +``` +the following holds: +``` +Cons(5, Nil).Cons? && Cons(5, Nil).head == 5 +``` +Note that the expression +``` +Cons(5, Nil).tail.head +``` +is not well-formed, since `Cons(5, Nil).tail` does not satisfy +`Cons?`. + +The names of the destructors must be unique across all the +constructors of the datatype. A constructor can have the same name as +the enclosing datatype; this is especially useful for +single-constructor datatypes, which are often called +_record types_. 
For example, a record type for black-and-white pixels +might be represented as follows: +``` +datatype Pixel = Pixel(x: int, y: int, on: bool) +``` + +To call a constructor, it is usually necessary only to mention the +name of the constructor, but if this is ambiguous, it is always +possible to qualify the name of constructor by the name of the +datatype. For example, `Cons(5, Nil)` above can be written +``` +List.Cons(5, List.Nil) +``` + +As an alternative to calling a datatype constructor explicitly, a +datatype value can be constructed as a change in one parameter from a +given datatype value using the _datatype update_ expression. For any +`d` whose type is a datatype that includes a constructor `C` that has +a parameter (destructor) named `f` of type `T`, and any expression `t` +of type `T`, +``` +d[f := t] +``` +constructs a value like `d` but whose `f` parameter is `t`. The +operation requires that `d` satisfies `C?`. For example, the +following equality holds: +``` +Cons(4, Nil)[tail := Cons(3, Nil)] == Cons(4, Cons(3, Nil)) +``` + +The datatype update expression also accepts multiple field +names, provided these are distinct. For example, a node of some +inductive datatype for trees may be updated as follows: + +``` +node[left := L, right := R] +``` + +## Tuple types +```` +TupleType_ = "(" [ Type { "," Type } ] ")" +```` + +Dafny builds in record types that correspond to tuples and gives these +a convenient special syntax, namely parentheses. For example, what +might have been declared as: +``` +datatype Pair = Pair(0: T, 1: U) +``` +Dafny provides as the type `(T, U)` and the constructor `(t, u)`, as +if the datatype's name were "" and its type arguments are given in +round parentheses, and as if the constructor name were "". Note that +the destructor names are `0` and `1`, which are legal identifier names +for members. 
For example, showing the use of a tuple destructor, here +is a property that holds of 2-tuples (that is, _pairs_): +``` +(5, true).1 == true +``` + +Dafny declares _n_-tuples where _n_ is 0 or 2 or up. There are no +1-tuples, since parentheses around a single type or a single value have +no semantic meaning. The 0-tuple type, `()`, is often known as the +_unit type_ and its single value, also written `()`, is known as _unit_. + +## Co-inductive datatypes + +```` +CoinductiveDatatypeDecl_ = "codatatype" { Attribute } DatatypeName [ GenericParameters ] + "=" DatatypeMemberDecl { "|" DatatypeMemberDecl } [ ";" ] +```` + +Whereas Dafny insists that there is a way to construct every inductive +datatype value from the ground up, Dafny also supports +_co-inductive datatypes_, whose constructors are evaluated lazily and +hence allows infinite structures. A co-inductive datatype is declared +using the keyword `codatatype`; other than that, it is declared and +used like an inductive datatype. + +For example, +``` +codatatype IList = Nil | Cons(head: T, tail: IList) +codatatype Stream = More(head: T, tail: Stream) +codatatype Tree = Node(left: Tree, value: T, right: Tree) +``` +declare possibly infinite lists (that is, lists that can be either +finite or infinite), infinite streams (that is, lists that are always +infinite), and infinite binary trees (that is, trees where every +branch goes on forever), respectively. + +The paper [Co-induction Simply], by Leino and +Moskal[@LEINO:Dafny:Coinduction], explains Dafny's implementation and +verification of co-inductive types. We capture the key features from that +paper in this section but the reader is referred to that paper for more +complete details and to supply bibliographic references that we have +omitted. + +Mathematical induction is a cornerstone of programming and program +verification. 
It arises in data definitions (e.g., some algebraic data +structures can be described using induction), it underlies program +semantics (e.g., it explains how to reason about finite iteration and +recursion), and it gets used in proofs (e.g., supporting lemmas about +data structures use inductive proofs). Whereas induction deals with +finite things (data, behavior, etc.), its dual, co-induction, deals with +possibly infinite things. Co-induction, too, is important in programming +and program verification, where it arises in data definitions (e.g., lazy +data structures), semantics (e.g., concurrency), and proofs (e.g., +showing refinement in a co-inductive big-step semantics). It is thus +desirable to have good support for both induction and co-induction in a +system for constructing and reasoning about programs. + +Co-datatypes and co-recursive functions make it possible to use lazily +evaluated data structures (like in Haskell or Agda). Co-predicates, +defined by greatest fix-points, let programs state properties of such +data structures (as can also be done in, for example, Coq). For the +purpose of writing co-inductive proofs in the language, we introduce +co-lemmas. Ostensibly, a co-lemma invokes the co-induction hypothesis +much like an inductive proof invokes the induction hypothesis. Underneath +the hood, our co-inductive proofs are actually approached via induction: +co-lemmas provide a syntactic veneer around this approach. + +The following example gives a taste of how the co-inductive features in +Dafny come together to give straightforward definitions of infinite +matters. 
+``` +// infinite streams +codatatype IStream = ICons(head: T, tail: IStream) + +// pointwise product of streams +function Mult(a: IStream, b: IStream): IStream +{ ICons(a.head * b.head, Mult(a.tail, b.tail)) } + +// lexicographic order on streams +copredicate Below(a: IStream, b: IStream) +{ a.head <= b.head && ((a.head == b.head) ==> Below(a.tail, b.tail)) } + +// a stream is Below its Square +colemma Theorem_BelowSquare(a: IStream) +ensures Below(a, Mult(a, a)) +{ assert a.head <= Mult(a, a).head; + if a.head == Mult(a, a).head { + Theorem_BelowSquare(a.tail); + } +} + +// an incorrect property and a bogus proof attempt +colemma NotATheorem_SquareBelow(a: IStream) + ensures Below(Mult(a, a), a); // ERROR +{ + NotATheorem_SquareBelow(a); +} +``` + +It defines a type `IStream` of infinite streams, with constructor `ICons` and +destructors `head` and `tail`. Function `Mult` performs pointwise +multiplication on infinite streams of integers, defined using a +co-recursive call (which is evaluated lazily). Co-predicate `Below` is +defined as a greatest fix-point, which intuitively means that the +co-predicate will take on the value true if the recursion goes on forever +without determining a different value. The co-lemma states the theorem +`Below(a, Mult(a, a))`. Its body gives the proof, where the recursive +invocation of the co-lemma corresponds to an invocation of the +co-induction hypothesis. + +The proof of the theorem stated by the first co-lemma lends +itself to the following intuitive reading: To prove that `a` is below +`Mult(a, a)`, check that their heads are ordered and, if the heads are +equal, also prove that the tails are ordered. The second co-lemma states +a property that does not always hold; the verifier is not fooled by the +bogus proof attempt and instead reports the property as unproved. 
+ +We argue that these definitions in Dafny are simple enough to level the +playing field between induction (which is familiar) and co-induction +(which, despite being the dual of induction, is often perceived as eerily +mysterious). Moreover, the automation provided by our SMT-based verifier +reduces the tedium in writing co-inductive proofs. For example, it +verifies `Theorem_BelowSquare` from the program text given above— no +additional lemmas or tactics are needed. In fact, as a consequence of the +automatic-induction heuristic in Dafny, the verifier will +automatically verify Theorem_BelowSquare even given an empty body. + +Just like there are restrictions on when an _inductive hypothesis_ can be +invoked, there are restriction on how a _co-inductive_ hypothesis can be +_used_. These are, of course, taken into consideration by our verifier. +For example, as illustrated by the second co-lemma above, invoking the +co-inductive hypothesis in an attempt to obtain the entire proof goal is +futile. (We explain how this works in section [#sec-colemmas]) Our initial experience +with co-induction in Dafny shows it to provide an intuitive, low-overhead +user experience that compares favorably to even the best of today’s +interactive proof assistants for co-induction. In addition, the +co-inductive features and verification support in Dafny have other +potential benefits. The features are a stepping stone for verifying +functional lazy programs with Dafny. Co-inductive features have also +shown to be useful in defining language semantics, as needed to verify +the correctness of a compiler, so this opens the possibility that +such verifications can benefit from SMT automation. + +### Well-Founded Function/Method Definitions +The Dafny programming language supports functions and methods. 
A _function_ +in Dafny is a mathematical function (i.e., it is well-defined, +deterministic, and pure), whereas a _method_ is a body of statements that +can mutate the state of the program. A function is defined by its given +body, which is an expression. To ensure that function definitions +are mathematically consistent, Dafny insists that recursive calls be well-founded, +enforced as follows: Dafny computes the call graph of functions. The strongly connected +components within it are _clusters_ of mutually recursive definitions arranged in +a DAG. This stratifies the functions so that a call from one cluster in the DAG to a +lower cluster is allowed arbitrarily. For an intra-cluster call, Dafny prescribes a proof +obligation that gets taken through the program verifier’s reasoning engine. Semantically, +each function activation is labeled by a _rank_—a lexicographic tuple determined +by evaluating the function’s **decreases** clause upon invocation of the function. The +proof obligation for an intra-cluster call is thus that the rank of the callee is strictly less +(in a language-defined well-founded relation) than the rank of the caller. Because +these well-founded checks correspond to proving termination of executable code, we +will often refer to them as “termination checks”. The same process applies to methods. + +Lemmas in Dafny are commonly introduced by declaring a method, stating +the property of the lemma in the _postcondition_ (keyword **ensures**) of +the method, perhaps restricting the domain of the lemma by also giving a +_precondition_ (keyword **requires**), and using the lemma by invoking +the method. Lemmas are stated, used, and proved as methods, but +since they have no use at run time, such lemma methods are typically +declared as _ghost_, meaning that they are not compiled into code. The +keyword **lemma** introduces such a method. 
Control flow statements +correspond to proof techniques—case splits are introduced with if +statements, recursion and loops are used for induction, and method calls +for structuring the proof. Additionally, the statement: +``` +forall x | P(x) { Lemma(x); } +``` +is used to invoke `Lemma(x)` on all `x` for which `P(x)` holds. If +`Lemma(x)` ensures `Q(x)`, then the forall statement establishes +``` +forall x :: P(x) ==> Q(x). +``` + +### Defining Co-inductive Datatypes +Each value of an inductive datatype is finite, in the sense that it can +be constructed by a finite number of calls to datatype constructors. In +contrast, values of a co-inductive datatype, or co-datatype for short, +can be infinite. For example, a co-datatype can be used to represent +infinite trees. + +Syntactically, the declaration of a co-datatype in Dafny looks like that +of a datatype, giving prominence to the constructors (following Coq). The +following example defines a co-datatype Stream of possibly +infinite lists. + +``` +codatatype Stream = SNil | SCons(head: T, tail: Stream) +function Up(n: int): Stream { SCons(n, Up(n+1)) } +function FivesUp(n: int): Stream + decreases 4 - (n - 1) % 5 +{ + if (n % 5 == 0) then + SCons(n, FivesUp(n+1)) + else + FivesUp(n+1) +} +``` + +`Stream` is a co-inductive datatype whose values are possibly infinite +lists. Function `Up` returns a stream consisting of all integers upwards +of `n` and `FivesUp` returns a stream consisting of all multiples of 5 +upwards of `n` . The self-call in `Up` and the first self-call in `FivesUp` +sit in productive positions and are therefore classified as co-recursive +calls, exempt from termination checks. The second self-call in `FivesUp` is +not in a productive position and is therefore subject to termination +checking; in particular, each recursive call must decrease the rank +defined by the **decreases** clause. + +Analogous to the common finite list datatype, Stream declares two +constructors, `SNil` and `SCons`. 
Values can be destructed using match +expressions and statements. In addition, like for inductive datatypes, +each constructor `C` automatically gives rise to a discriminator `C?` and +each parameter of a constructor can be named in order to introduce a +corresponding destructor. For example, if `xs` is the stream +`SCons(x, ys)`, then `xs.SCons?` and `xs.head == x` hold. In contrast +to datatype declarations, there is no grounding check for +co-datatypes—since a codatatype admits infinite values, the type is +nevertheless inhabited. + +### Creating Values of Co-datatypes +To define values of co-datatypes, one could imagine a “co-function” +language feature: the body of a “co-function” could include possibly +never-ending self-calls that are interpreted by a greatest fix-point +semantics (akin to a **CoFixpoint** in Coq). Dafny uses a different design: +it offers only functions (not “co-functions”), but it classifies each +intra-cluster call as either _recursive_ or _co-recursive_. Recursive calls +are subject to termination checks. Co-recursive calls may be +never-ending, which is what is needed to define infinite values of a +co-datatype. For example, function `Up(n )` in the preceding example is defined as the +stream of numbers from `n` upward: it returns a stream that starts with `n` +and continues as the co-recursive call `Up(n + 1)`. + +To ensure that co-recursive calls give rise to mathematically consistent definitions, +they must occur only in productive positions. This says that it must be possible to determine +each successive piece of a co-datatype value after a finite amount of work. This +condition is satisfied if every co-recursive call is syntactically guarded by a constructor +of a co-datatype, which is the criterion Dafny uses to classify intra-cluster calls as being +either co-recursive or recursive. Calls that are classified as co-recursive are exempt from +termination checks. 
+ +A consequence of the productivity checks and termination checks is that, even in the +absence of talking about least or greatest fix-points of self-calling functions, all functions +in Dafny are deterministic. Since there is no issue of several possible fix-points, +the language allows one function to be involved in both recursive and co-recursive calls, +as we illustrate by the function `FivesUp`. + +### Copredicates +Determining properties of co-datatype values may require an infinite +number of observations. To that avail, Dafny provides _co-predicates_ +which are function declarations that use the `copredicate` keyword. +Self-calls to a co-predicate need not terminate. Instead, the value +defined is the greatest fix-point of the given recurrence equations. +Continuing the preceding example, the following code defines a +co-predicate that holds for exactly those streams whose payload consists +solely of positive integers. The co-predicate definition implicitly also +gives rise to a corresponding prefix predicate, `Pos#`. The syntax for +calling a prefix predicate sets apart the argument that specifies the +prefix length, as shown in the last line; for this figure, we took the +liberty of making up a coordinating syntax for the signature of the +automatically generated prefix predicate (which is not part of +Dafny syntax). + +``` +copredicate Pos(s: Stream) +{ + match s + case SNil => true + case SCons(x, rest) => x > 0 && Pos(rest) +} +// Automatically generated by the Dafny compiler: +predicate Pos#[_k: nat](s: Stream) + decreases _k +{ if _k = 0 then true else + match s + case SNil => true + case SCons(x, rest) => x > 0 && Pos#[_k-1](rest) +} +``` + +Some restrictions apply. To guarantee that the greatest fix-point always +exists, the (implicit functor defining the) co-predicate must be +monotonic. 
This is enforced by a syntactic restriction on the form of the +body of co-predicates: after conversion to negation normal form (i.e., +pushing negations down to the atoms), intra-cluster calls of +co-predicates must appear only in _positive_ positions—that is, they must +appear as atoms and must not be negated. Additionally, to guarantee +soundness later on, we require that they appear in _co-friendly_ +positions—that is, in negation normal form, when they appear under +existential quantification, the quantification needs to be limited to a +finite range[^fn-copredicate-restriction]. Since the evaluation of a co-predicate might not +terminate, co-predicates are always ghost. There is also a restriction on +the call graph that a cluster containing a co-predicate must contain only +co-predicates, no other kinds of functions. + +[^fn-copredicate-restriction]: Higher-order function support in Dafny is + rather modest and typical reasoning patterns do not involve them, so this + restriction is not as limiting as it would have been in, e.g., Coq. + +A **copredicate** declaration of `P` defines not just a co-predicate, but +also a corresponding _prefix predicate_ `P#`. A prefix predicate is a +finite unrolling of a co-predicate. The prefix predicate is constructed +from the co-predicate by + +* adding a parameter _k of type nat to denote the prefix length, + +* adding the clause "**decreases** `_k;`" to the prefix predicate (the + co-predicate itself is not allowed to have a decreases clause), + +* replacing in the body of the co-predicate every intra-cluster + call `Q(args)` to a copredicate by a call `Q#[_k - 1](args)` + to the corresponding prefix predicate, and then + +* prepending the body with `if _k = 0 then true else`. + +For example, for co-predicate `Pos`, the definition of the prefix +predicate `Pos#` is as suggested above. 
Syntactically, the prefix-length +argument passed to a prefix predicate to indicate how many times to +unroll the definition is written in square brackets, as in `Pos#[k](s)`. +In the Dafny grammar this is called a ``HashCall``. The definition of +`Pos#` is available only at clusters strictly higher than that of `Pos`; +that is, `Pos` and `Pos#` must not be in the same cluster. In other +words, the definition of `Pos` cannot depend on `Pos#`. + +#### Co-Equality +Equality between two values of a co-datatype is a built-in co-predicate. +It has the usual equality syntax `s == t`, and the corresponding prefix +equality is written `s ==#[k] t`. And similarly for `s != t` +and `s !=#[k] t`. + +### Co-inductive Proofs +From what we have said so far, a program can make use of properties of +co-datatypes. For example, a method that declares `Pos(s)` as a +precondition can rely on the stream `s` containing only positive integers. +In this section, we consider how such properties are established in the +first place. + +#### Properties About Prefix Predicates +Among other possible strategies for establishing co-inductive properties +we take the time-honored approach of reducing co-induction to +induction. More precisely, Dafny passes to the SMT solver an +assumption `D(P)` for every co-predicate `P`, where: + +``` +D(P) = ? x • P(x) <==> ? k • P#[k](x) +``` + +In other words, a co-predicate is true iff its corresponding prefix +predicate is true for all finite unrollings. + +In Sec. 4 of the paper [Co-induction Simply] a soundness theorem of such +assumptions is given, provided the co-predicates meet the co-friendly +restrictions. 
An example proof of `Pos(Up(n))` for every `n > 0` is +here shown: + +``` +lemma UpPosLemma(n: int) + requires n > 0 + ensures Pos(Up(n)) +{ + forall k | 0 <= k { UpPosLemmaK(k, n); } +} + +lemma UpPosLemmaK(k: nat, n: int) + requires n > 0 + ensures Pos#[k](Up(n)) + decreases k +{ + if k != 0 { + // this establishes Pos#[k-1](Up(n).tail) + UpPosLemmaK(k-1, n+1); + } +} +``` + +The lemma `UpPosLemma` proves `Pos(Up(n))` for every `n > 0`. We first +show `Pos#[k](Up(n ))`, for `n > 0` and an arbitrary `k`, and then use +the forall statement to show `? k • Pos#[k](Up(n))`. Finally, the axiom +`D(Pos)` is used (automatically) to establish the co-predicate. + + +#### Colemmas +As we just showed, with help of the `D` axiom we can now prove a +co-predicate by inductively proving that the corresponding prefix +predicate holds for all prefix lengths `k` . In this section, we introduce +_co-lemma_ declarations, which bring about two benefits. The first benefit +is that co-lemmas are syntactic sugar and reduce the tedium of having to +write explicit quantifications over `k` . The second benefit is that, in +simple cases, the bodies of co-lemmas can be understood as co-inductive +proofs directly. As an example consider the following co-lemma. + +``` +colemma UpPosLemma(n: int) + requires n > 0 + ensures Pos(Up(n)) +{ + UpPosLemma(n+1); +} +``` +This co-lemma can be understood as follows: `UpPosLemma` invokes itself +co-recursively to obtain the proof for `Pos(Up(n).tail)` (since `Up(n).tail` +equals `Up(n+1)`). The proof glue needed to then conclude `Pos(Up(n))` is +provided automatically, thanks to the power of the SMT-based verifier. + +#### Prefix Lemmas +To understand why the above `UpPosLemma` co-lemma code is a sound proof, +let us now describe the details of the desugaring of co-lemmas. In +analogy to how a **copredicate** declaration defines both a co-predicate and +a prefix predicate, a **colemma** declaration defines both a co-lemma and +_prefix lemma_. 
In the call graph, the cluster containing a co-lemma must +contain only co-lemmas and prefix lemmas, no other methods or function. +By decree, a co-lemma and its corresponding prefix lemma are always +placed in the same cluster. Both co-lemmas and prefix lemmas are always +ghosts. + +The prefix lemma is constructed from the co-lemma by + +* adding a parameter `_k` of type `nat` to denote the prefix length, + +* replacing in the co-lemma’s postcondition the positive co-friendly + occurrences of co-predicates by corresponding prefix predicates, + passing in `_k` as the prefix-length argument, + +* prepending `_k` to the (typically implicit) **decreases** clause of the co-lemma, + +* replacing in the body of the co-lemma every intra-cluster call + `M(args)` to a colemma by a call `M#[_k - 1](args)` to the + corresponding prefix lemma, and then + +* making the body’s execution conditional on `_k != 0`. + +Note that this rewriting removes all co-recursive calls of co-lemmas, +replacing them with recursive calls to prefix lemmas. These recursive +call are, as usual, checked to be terminating. We allow the pre-declared +identifier `_k` to appear in the original body of the +co-lemma.[^fn-co-predicate-co-lemma-diffs] + +[^fn-co-predicate-co-lemma-diffs]: Note, two places where co-predicates + and co-lemmas are not analogous are: co-predicates must not make + recursive calls to their prefix predicates, and co-predicates cannot + mention _k. + +We can now think of the body of the co-lemma as being replaced by a +**forall** call, for every _k_ , to the prefix lemma. By construction, +this new body will establish the colemma’s declared postcondition (on +account of the `D` axiom, and remembering that only the positive +co-friendly occurrences of co-predicates in the co-lemma’s postcondition +are rewritten), so there is no reason for the program verifier to check +it. 
+ +The actual desugaring of our co-lemma `UpPosLemma` is in fact the +previous code for the `UpPosLemma` lemma except that `UpPosLemmaK` is +named `UpPosLemma#` and modulo a minor syntactic difference in how the +`k` argument is passed. + +In the recursive call of the prefix lemma, there is a proof obligation +that the prefixlength argument `_k - 1` is a natural number. +Conveniently, this follows from the fact that the body has been wrapped +in an `if _k != 0` statement. This also means that the postcondition must +hold trivially when `_k = 0`, or else a postcondition violation will be +reported. This is an appropriate design for our desugaring, because +co-lemmas are expected to be used to establish co-predicates, whose +corresponding prefix predicates hold trivially when `_k = 0`. (To prove +other predicates, use an ordinary lemma, not a co-lemma.) + +It is interesting to compare the intuitive understanding of the +co-inductive proof in using a co-lemma with the inductive proof in using +the lemma. Whereas the inductive proof is performing proofs for deeper +and deeper equalities, the co-lemma can be understood as producing the +infinite proof on demand. + +# Newtypes +```` +NewtypeDecl = "newtype" { Attribute } NewtypeName "=" + ( NumericTypeName [ ":" Type ] "|" Expression(allowLemma: false, allowLambda: true) + | Type + ) +```` + +A new numeric type can be declared with the _newtype_ +declaration[^fn-newtype-name], for example: +``` +newtype N = x: M | Q +``` +where `M` is a numeric type and `Q` is a boolean expression that can +use `x` as a free variable. If `M` is an integer-based numeric type, +then so is `N`; if `M` is real-based, then so is `N`. If the type `M` +can be inferred from `Q`, the "`: M`" can be omitted. If `Q` is just +`true`, then the declaration can be given simply as: +``` +newtype N = M +``` +Type `M` is known as the _base type_ of `N`. + +[^fn-newtype-name]: Should `newtype` perhaps be renamed to `numtype`? 
+ +A newtype is a numeric type that supports the same operations as its +base type. The newtype is distinct from and incompatible with other +numeric types; in particular, it is not assignable to its base type +without an explicit conversion. An important difference between the +operations on a newtype and the operations on its base type is that +the newtype operations are defined only if the result satisfies the +predicate `Q`, and likewise for the literals of the +newtype.[^fn-newtype-design-question] + +[^fn-newtype-design-question]: Would it be useful to also + automatically define `predicate N?(m: M) { Q }`? + +For example, suppose `lo` and `hi` are integer-based numerics that +satisfy `0 <= lo <= hi` and consider the following code fragment: +``` +var mid := (lo + hi) / 2; +``` +If `lo` and `hi` have type `int`, then the code fragment is legal; in +particular, it never overflows, since `int` has no upper bound. In +contrast, if `lo` and `hi` are variables of a newtype `int32` declared +as follows: +``` +newtype int32 = x | -0x80000000 <= x < 0x80000000 +``` +then the code fragment is erroneous, since the result of the addition +may fail to satisfy the predicate in the definition of `int32`. The +code fragment can be rewritten as +``` +var mid := lo + (hi - lo) / 2; +``` +in which case it is legal for both `int` and `int32`. + +Since a newtype is incompatible with its base type and since all +results of the newtype's operations are members of the newtype, a +compiler for Dafny is free to specialize the run-time representation +of the newtype. For example, by scrutinizing the definition of +`int32` above, a compiler may decide to store `int32` values using +signed 32-bit integers in the target hardware. + +Note that the bound variable `x` in `Q` has type `M`, not `N`. +Consequently, it may not be possible to state `Q` about the `N` +value. 
For example, consider the following type of 8-bit 2's +complement integers: +``` +newtype int8 = x: int | -128 <= x < 128 +``` +and consider a variable `c` of type `int8`. The expression +``` +-128 <= c < 128 +``` +is not well-defined, because the comparisons require each operand to +have type `int8`, which means the literal `128` is checked to be of +type `int8`, which it is not. A proper way to write this expression +would be to use a conversion operation, described next, on `c` to +convert it to the base type: +``` +-128 <= int(c) < 128 +``` + +If possible Dafny will represent values of the newtype using +a native data type for the sake of efficiency. This action can +be inhibited or a specific native data type selected by +using the `(:nativeType)` attribute, as explained in +section [#sec-nativetype]. + +There is a restriction that the value `0` must be part of every +newtype.[^fn-newtype-zero] + +[^fn-newtype-zero]: The restriction is due to a current limitation in + the compiler. This will change in the future and will also open + up the possibility for subset types and non-null reference + types. + +## Numeric conversion operations + +For every numeric type `N`, there is a conversion function with the +same name. It is a partial identity function. It is defined when the +given value, which can be of any numeric type, is a member of the type +converted to. When the conversion is from a real-based numeric type +to an integer-based numeric type, the operation requires that the +real-based argument has no fractional part. (To round a real-based +numeric value down to the nearest integer, use the `.Trunc` member, +see Section [#sec-numeric-types].) + +To illustrate using the example from above, if `lo` and `hi` have type +`int32`, then the code fragment can legally be written as follows: +``` +var mid := (int(lo) + int(hi)) / 2; +``` +where the type of `mid` is inferred to be `int`. 
Since the result +value of the division is a member of type `int32`, one can introduce +yet another conversion operation to make the type of `mid` be `int32`: +``` +var mid := int32((int(lo) + int(hi)) / 2); +``` +If the compiler does specialize the run-time representation for +`int32`, then these statements come at the expense of two, +respectively three, run-time conversions. + +# Subset types +```` +NatType_ = "nat" +```` + +A _subset type_ is a restricted use of an existing type, called +the _base type_ of the subset type. A subset type is like a +combined use of the base type and a predicate on the base +type. + +An assignment from a subset type to its base type is always +allowed. An assignment in the other direction, from the base type to +a subset type, is allowed provided the value assigned does indeed +satisfy the predicate of the subset type. +(Note, in contrast, assignments between a newtype and its base type +are never allowed, even if the value assigned is a value of the target +type. For such assignments, an explicit conversion must be used, see +Section [#sec-numeric-conversion-operations].) + +Dafny supports one subset type, namely the built-in type `nat`, +whose base type is `int`.[^fn-more-subset-types] Type `nat` +designates the non-negative subrange of `int`. 
A simple example that +puts subset type `nat` to good use is the standard Fibonacci +function: +``` +function Fib(n: nat): nat +{ + if n < 2 then n else Fib(n-2) + Fib(n-1) +} +``` +An equivalent, but clumsy, formulation of this function (modulo the +wording of any error messages produced at call sites) would be to use +type `int` and to write the restricting predicate in pre- and +postconditions: +``` +function Fib(n: int): int + requires 0 <= n; // the function argument must be non-negative + ensures 0 <= Fib(n); // the function result is non-negative +{ + if n < 2 then n else Fib(n-2) + Fib(n-1) +} +``` + +[^fn-more-subset-types]: A future version of Dafny will support + user-defined subset types. + +Type inference will never infer the type of a variable to be a +subset type. It will instead infer the type to be the base type +of the subset type. For example, the type of `x` in +``` +forall x :: P(x) +``` +will be `int`, even if predicate `P` declares its argument to have +type `nat`. + +# Statements +```` +Stmt = ( BlockStmt | AssertStmt | AssumeStmt | PrintStmt | UpdateStmt + | VarDeclStatement | IfStmt | WhileStmt | MatchStmt | ForallStmt + | CalcStmt | ModifyStmt | LabeledStmt_ | BreakStmt_ | ReturnStmt + | YieldStmt | SkeletonStmt + ) +```` +Many of Dafny's statements are similar to those in traditional +programming languages, but a number of them are significantly different. +This grammar production shows the different kinds of Dafny statements. +They are described in subsequent sections. + +## Labeled Statement +```` +LabeledStmt_ = "label" LabelName ":" Stmt +```` +A labeled statement is just the keyword `label` followed by and +identifier which is the label followed by a colon and a +statement. The label may be referenced in a break statement +to transfer control to the location after that statement. 
+ +## Break Statement +```` +BreakStmt_ = "break" ( LabelName | { "break" } ) ";" +```` +A break statement breaks out of one or more loops (if the +statement consists solely of one or more `break` keywords), +or else transfers control to just past the statement +bearing the referenced label, if a label was used. + +## Block Statement +```` +BlockStmt = "{" { Stmt } "}" +```` +A block statement is just a sequence of statements enclosed by curly braces. + +## Return Statement +```` +ReturnStmt = "return" [ Rhs { "," Rhs } ] ";" +```` +A return statement can only be used in a method. It is used +to terminate the execution of the method. +To return a value from a method, the value is assigned to one +of the named return values sometime before a return statement. +In fact, the return values act very much like local variables, +and can be assigned to more than once. Return statements are +used when one wants to return before reaching the end of the +body block of the method. Return statements can be just the +return keyword (where the current value of the out parameters +are used), or they can take a list of values to return. +If a list is given the number of values given must be the +same as the number of named return values. + +## Yield Statement +```` +YieldStmt = "yield" [ Rhs { "," Rhs } ] ";" +```` + +A yield statement can only be used in an iterator. +See section [Iterator types](#sec-iterator-types) for more details +about iterators. + +The body of an iterator is a _co-routine_. It is used +to yield control to its caller, signaling that a new +set of values for the iterator's yield parameters (if any) +are available. Values are assigned to the yield parameters +at or before a yield statement. +In fact, the yield parameters act very much like local variables, +and can be assigned to more than once. Yield statements are +used when one wants to return new yield parameter values +to the caller. 
Yield statements can be just the +**yield** keyword (where the current value of the yield parameters +are used), or they can take a list of values to yield. +If a list is given the number of values given must be the +same as the number of named return yield parameters. + +## Update Statement +```` +UpdateStmt = Lhs { "," Lhs } + ( ":=" Rhs { "," Rhs } + | ":|" [ "assume" ] Expression(allowLemma: false, allowLambda: true) + ) + ";"" +```` + +The update statement has two forms. The first more normal form +allows for parallel assignment of right-hand-side values to the +left-hand side. For example `x,y := y,x` to swap the values +of `x` and `y`. Of course the common case will have only one +rhs and one lhs. + +The form that uses "`:|`" assigns some values to the left-hand-side +variables such that the boolean expression on the right hand side +is satisfied. This can be used to make a choice as in the +following example where we choose an element in a set. + +``` +function PickOne(s: set): T + requires s != {} +{ + var x :| x in s; x +} +``` + +Dafny will report an error if it cannot prove that values +exist which satisfy the condition. + +In addition, though the choice is arbitrary, given identical +circumstances the choice will be made consistently. + + +In the actual grammar two additional forms are recognized for +purposes of error detection. The form: + +```` +Lhs { Attribute} ; +```` + +is assumed to be a mal-formed call. + +The form + +```` +Lhs ":" +```` + +is diagnosed as a label in which the user forgot the **label** keyword. 
+ +## Variable Declaration Statement +```` +VarDeclStatement = [ "ghost" ] "var" { Attribute } + ( + LocalIdentTypeOptional { "," { Attribute } LocalIdentTypeOptional } + [ ":=" Rhs { "," Rhs } + | { Attribute } ":|" [ "assume" ] Expression(allowLemma: false, allowLambda: true) + ] + | + "(" CasePattern { "," CasePattern } ")" + ":=" Expression(allowLemma: false, allowLambda: true) + ) + ";" +```` + +A ``VarDeclStatement`` is used to declare one or more local variables in a method or function. +The type of each local variable must be given unless the variable is given an initial +value in which case the type will be inferred. If initial values are given, the number of +values must match the number of variables declared. + +Note that the type of each variable must be given individually. The following code + +``` +var x, y : int; +``` +does not declare both `x` and `y` to be of type `int`. Rather it will give an +error explaining that the type of `x` is underspecified. + +The lefthand side can also contain a tuple of patterns which will be +matched against the right-hand-side. For example: + +``` +function returnsTuple() : (int, int) +{ + (5, 10) +} + +function usesTuple() : int +{ + var (x, y) := returnsTuple(); + x + y +} +``` + +## Guards +```` +Guard = ( "*" | "(" "*" ")" | Expression(allowLemma: true, allowLambda: true) ) +```` +Guards are used in `if` and `while` statements as boolean expressions. Guards +take two forms. + +The first and most common form is just a boolean expression. + +The second form is either `*` or `(*)`. These have the same meaning. An +unspecified boolean value is returned. The value returned +may be different each time it is executed. + +## Binding Guards +```` +BindingGuard(allowLambda) = + IdentTypeOptional { "," IdentTypeOptional } { Attribute } + ":|" Expression(allowLemma: true, allowLambda) +```` + +A ``BindingGuard`` is used as a condition in an ``IfStmt``. +It binds the identifiers declared in the ``IdentTypeOptional``s. 
+If there exists one or more assignments of values to the bound identifiers +for which ``Expression`` is true, then the ``BindingGuard`` +returns true and the identifiers are bound to values that make the +``Expression`` true. + +The identifiers bound by ``BindingGuard`` are ghost variables +and cannot be assigned to non-ghost variables. They are only +used in specification contexts. + +Here is an example: + +``` +predicate P(n: int) +{ + n % 2 == 0 +} + +method M1() returns (ghost y: int) + requires exists x :: P(x) + ensures P(y) +{ + if x : int :| P(x) { + y := x; + } +} +``` + +## If Statement +```` +IfStmt = "if" + ( IfAlternativeBlock + | + ( BindingGuard(allowLambda: true) + | Guard + | "..." + ) + BlockStmt [ "else" ( IfStmt | BlockStmt ) ] + ) +```` + +In the simplest form an `if` statement uses a guard that is a boolean +expression. It then has the same form as in C# and other common +programming languages. For example + +``` + if x < 0 { + x := -x; + } +``` + +If the guard is an asterisk then a non-deterministic choice is made: + +``` + if * { + print "True"; + } else { + print "False"; + } +``` + +```` +IfAlternativeBlock = + "{" { "case" + ( + BindingGuard(allowLambda:false) + | Expression(allowLemma: true, allowLambda: false) + ) "=>" { Stmt } } "}" . +```` + +The `if` statement using the `IfAlternativeBlock` form is similar to the +`if ... fi` construct used in the book "A Discipline of Programming" by +Edsger W. Dijkstra. It is used for a multi-branch `if`. + +For example: +``` + if { + case x <= y => max := y; + case y <= x => max := y; + } +``` + +In this form the expressions following the `case` keyword are called +_guards_. The statement is evaluated by evaluating the guards in an +undetermined order until one is found that is `true` or else all have +evaluated to `false`. If none of them evaluate to `true` then the `if` +statement does nothing. Otherwise the statements to the right of `=>` +for the guard that evaluated to `true` are executed. 
+ +## While Statement +```` +WhileStmt = "while" + ( LoopSpecWhile WhileAlternativeBlock + | ( Guard | "..." ) LoopSpec + ( BlockStmt + | "..." + | /* go body-less */ + ) + ) +```` + +```` +WhileAlternativeBlock = + "{" { "case" Expression(allowLemma: true, allowLambda: false) "=>" { Stmt } } "}" . +```` + +See section [#sec-loop-specification] for a description of ``LoopSpec``. + +The `while` statement is Dafny's only loop statement. It has two general +forms. + +The first form is similar to a while loop in a C-like language. For +example: + +``` + var i := 0; + while i < 5 { + i := i + 1; + } +``` + +In this form the condition following the `while` is one of these: + +* A boolean expression. If true it means execute one more +iteration of the loop. If false then terminate the loop. +* An asterisk (`*`), meaning non-deterministically yield either +`true` or `false` as the value of the condition +* An ellipsis (`...`), which makes the while statement a _skeleton_ +`while` statement. TODO: What does that mean? + +The _body_ of the loop is usually a block statement, but it can also +be a _skeleton_, denoted by ellipsis, or missing altogether. +TODO: Wouldn't a missing body cause problems? Isn't it clearer to have +a block statement with no statements inside? + +The second form uses the `WhileAlternativeBlock`. It is similar to the +`do ... od` construct used in the book "A Discipline of Programming" by +Edsger W. Dijkstra. For example: + +``` + while + decreases if 0 <= r then r else -r; + { + case r < 0 => + r := r + 1; + case 0 < r => + r := r - 1; + } +``` +For this form the guards are evaluated in some undetermined order +until one is found that is true, in which case the corresponding statements +are executed. If none of the guards evaluates to true then the +loop execution is terminated. + +### Loop Specifications +For some simple loops such as those mentioned previously Dafny can figure +out what the loop is doing without more help. 
However in general the user +must provide more information in order to help Dafny prove the effect of +the loop. This information is provided by a ``LoopSpec``. A +``LoopSpec`` provides information about invariants, termination, and +what the loop modifies. ``LoopSpecs`` are explained in +section [#sec-loop-specification]. However the following sections +present additional rationale and tutorial on loop specifications. + +#### Loop Invariants + +`While` loops present a problem for Dafny. There is no way for Dafny to +know in advance how many times the code will go around the loop. But +Dafny needs to consider all paths through a program, which could include +going around the loop any number of times. To make it possible for Dafny +to work with loops, you need to provide loop invariants, another kind of +annotation. + +A loop invariant is an expression that holds upon entering a loop, and +after every execution of the loop body. It captures something that is +invariant, i.e. does not change, about every step of the loop. Now, +obviously we are going to want to change variables, etc. each time around +the loop, or we wouldn't need the loop. Like pre- and postconditions, an +invariant is a property that is preserved for each execution of the loop, +expressed using the same boolean expressions we have seen. For example, + +``` +var i := 0; +while i < n + invariant 0 <= i +{ + i := i + 1; +} +``` + +When you specify an invariant, Dafny proves two things: the invariant +holds upon entering the loop, and it is preserved by the loop. By +preserved, we mean that assuming that the invariant holds at the +beginning of the loop, we must show that executing the loop body once +makes the invariant hold again. Dafny can only know upon analyzing the +loop body what the invariants say, in addition to the loop guard (the +loop condition). 
Just as Dafny will not discover properties of a method +on its own, it will not know any but the most basic properties of a loop +are preserved unless it is told via an invariant. + +#### Loop Termination + +Dafny proves that code terminates, i.e. does not loop forever, by using +`decreases` annotations. For many things, Dafny is able to guess the right +annotations, but sometimes it needs to be made explicit. In fact, for all +of the code we have seen so far, Dafny has been able to do this proof on +its own, which is why we haven't seen the decreases annotation explicitly +yet. There are two places Dafny proves termination: loops and recursion. +Both of these situations require either an explicit annotation or a +correct guess by Dafny. + +A `decreases` annotation, as its name suggests, gives Dafny an expression +that decreases with every loop iteration or recursive call. There are two +conditions that Dafny needs to verify when using a decreases expression: + +* that the expression actually gets smaller, and +* that it is bounded. + +Many times, an integral value (natural or plain integer) is the quantity +that decreases, but other things that can be used as well. In the case of +integers, the bound is assumed to be zero. For example, the following is +a proper use of decreases on a loop (with its own keyword, of course): + +``` + while 0 < i + invariant 0 <= i + decreases i + { + i := i - 1; + } +``` + +Here Dafny has all the ingredients it needs to prove termination. The +variable i gets smaller each loop iteration, and is bounded below by +zero. This is fine, except the loop is backwards from most loops, which +tend to count up instead of down. In this case, what decreases is not the +counter itself, but rather the distance between the counter and the upper +bound. 
A simple trick for dealing with this situation is given below: + +``` + while i < n + invariant 0 <= i <= n + decreases n - i + { + i := i + 1; + } +``` + +This is actually Dafny's guess for this situation, as it sees `i < n` and +assumes that `n - i` is the quantity that decreases. The upper bound of the +loop invariant implies that `0 <= n – i`, and gives Dafny a lower bound on +the quantity. This also works when the bound `n` is not constant, such as +in the binary search algorithm, where two quantities approach each other, +and neither is fixed. + +If the **decreases** clause of a loop specified "*", then no +termination check will be performed. Use of this feature is sound only with +respect to partial correctness. + +#### Loop Framing +In some cases we also must specify what memory locations the loop body +is allowed to modify. This is done using a `modifies` clause. +See the discussion of framing in methods for a fuller discussion. + +## Match Statement +```` +MatchStmt = "match" Expression(allowLemma: true, allowLambda: true) + ( "{" { CaseStatement } "}" + | { CaseStatement } + ) + +CaseStatement = CaseBinding_ "=>" { Stmt } +```` + +The `match` statement is used to do case analysis on a value of inductive +or co-inductive type. The form with no leading ``Ident`` is for matching +tuples. The expression after the `match` keyword is the (co)inductive +value being matched. The expression is evaluated and then matched against +each of the case clauses. + +There must be a case clause for each constructor of the data type. +The identifier after the `case` keyword in a case clause, if present, +must be the name of one of the data type's constructors. +If the constructor takes parameters then a parenthesis-enclosed +list of identifiers (with optional type) must follow the +constructor. There must be as many identifiers as the constructor +has parameters. 
If the optional type is given it must be the same +as the type of the corresponding parameter of the constructor. +If no type is given then the type of the corresponding parameter +is the type assigned to the identifier. + +When an inductive value that was created using constructor +expression `C1(v1, v2)` is matched against a case clause +`C2(x1, x2`), there is a match provided that `C1` and `C2` are the +same constructor. In that case `x1` is bound to value `v1` and +`x2` is bound to `v2`. The identifiers in the case pattern +are not mutable. Here is an example of the use of a `match` statement. + +``` +datatype Tree = Empty | Node(left: Tree, data: int, right: Tree) + +// Return the sum of the data in a tree. +method Sum(x: Tree) returns (r: int) +{ + match x { + case Empty => r := -1; + case Node(t1 : Tree, d, t2) => { + var v1 := Sum(t1); + var v2 := Sum(t2); + r := v1 + d + v2; + } + } +} +``` + +Note that the `Sum` method is recursive yet has no `decreases` annotation. +In this case it is not needed because Dafny is able to deduce that +`t1` and `t2` are _smaller_ (structurally) than `x`. If `Tree` had been +coinductive this would not have been possible since `x` might have been +infinite. + +## Assert Statement +```` +AssertStmt = + "assert" { Attribute } + ( Expression(allowLemma: false, allowLambda: true) + | "..." + ) ";" +```` + +`Assert` statements are used to express logical proposition that are +expected to be true. Dafny will attempt to prove that the assertion +is true and give an error if not. Once it has proved the assertion +it can then use its truth to aid in following deductions. +Thus if Dafny is having a difficult time verifying a method +the user may help by inserting assertions that Dafny can prove, +and whose true may aid in the larger verification effort. + +If the proposition is `...` then (TODO: what does this mean?). 
+ +## Assume Statement +```` +AssumeStmt = + "assume" { Attribute } + ( Expression(allowLemma: false, allowLambda: true) + | "..." + ) ";" +```` + +The `Assume` statement lets the user specify a logical proposition +that Dafny may assume to be true without proof. If in fact the +proposition is not true this may lead to invalid conclusions. + +An `Assume` statement would ordinarily be used as part of a larger +verification effort where verification of some other part of +the program required the proposition. By using the `Assume` statement +the other verification can proceed. Then when that is completed the +user would come back and replace the `assume` with `assert`. + +If the proposition is `...` then (TODO: what does this mean?). + +## Print Statement +```` +PrintStmt = + "print" Expression(allowLemma: false, allowLambda: true) + { "," Expression(allowLemma: false, allowLambda: true) } ";" +```` + +The `print` statement is used to print the values of a comma-separated +list of expressions to the console. The generated C# code uses +the `System.Object.ToString()` method to convert the values to printable +strings. The expressions may of course include strings that are used +for captions. There is no implicit new line added, so to get a new +line you should include "\n" as part of one of the expressions. +Dafny automatically creates overrides for the ToString() method +for Dafny data types. 
For example, + +``` +datatype Tree = Empty | Node(left: Tree, data: int, right: Tree) +method Main() +{ + var x : Tree := Node(Node(Empty, 1, Empty), 2, Empty); + print "x=", x, "\n"; +} +``` + +produces this output: + +``` +x=Tree.Node(Tree.Node(Tree.Empty, 1, Tree.Empty), 2, Tree.Empty) +``` + +## Forall Statement +```` +ForallStmt = "forall" + ( "(" [ QuantifierDomain ] ")" + | [ QuantifierDomain ] + ) + { [ "free" ] ForAllEnsuresClause_ } + [ BlockStmt ] +```` + +The `forall` statement executes ensures expressions or a body in +parallel for all quantified values in the specified range. +The use of the `parallel` keyword is deprecated. Use +`forall` instead. There are several variant uses of the `forall` +statement. And there are a number of restrictions. + +In particular a `forall` statement can be classified as one of the following: + +* _Assign_ - the `forall` statement is used for simultaneous assignment. +The target must be an array element or an object field. +* _Call_ - The body consists of a single call to a method without side effects +* _Proof_ - The `forall` has `ensure` expressions which are effectively +quantified or proved by the body (if present). + +An _assign_ `forall` statement is to perform simultaneous assignment. +The following is an excerpt of an example given by Leino in +[Developing Verified Programs with Dafny][leino233]. +When the buffer holding the queue needs to be resized, +the `forall` statement is used to simultaneously copy the old contents +into the new buffer. + +[leino233]: http://research.microsoft.com/en-us/um/people/leino/papers/krml233.pdf + +``` +class {:autocontracts} SimpleQueue +{ + ghost var Contents: seq; + var a: array; // Buffer holding contents of queue. + var m: int // Index head of queue. + var n: int; // Index just past end of queue + ... 
+ method Enqueue(d: Data) + ensures Contents == old(Contents) + [d] + { + if n == a.Length { + var b := a; + if m == 0 { b := new Data[2 * a.Length]; } + forall (i | 0 <= i < n - m) { + b[i] := a[m + i]; + } + a, m, n := b, 0, n - m; + } + a[n], n, Contents := d, n + 1, Contents + [d]; + } +} +``` + +Here is an example of a _call_ `forall` statement and the +callee. This is contained in the CloudMake-ConsistentBuilds.dfy +test in the Dafny repository. + +``` +forall (cmd', deps', e' | Hash(Loc(cmd', deps', e')) == Hash(Loc(cmd, deps, e))) { + HashProperty(cmd', deps', e', cmd, deps, e); +} + +ghost method HashProperty(cmd: Expression, deps: Expression, ext: string, + cmd': Expression, deps': Expression, ext': string) + requires Hash(Loc(cmd, deps, ext)) == Hash(Loc(cmd', deps', ext')) + ensures cmd == cmd' && deps == deps' && ext == ext' +``` + +From the same file here is an example of a _proof_ `forall` statement. + +``` +forall (p | p in DomSt(stCombinedC.st) && p in DomSt(stExecC.st)) + ensures GetSt(p, stCombinedC.st) == GetSt(p, stExecC.st) +{ + assert DomSt(stCombinedC.st) <= DomSt(stExecC.st); + assert stCombinedC.st == Restrict(DomSt(stCombinedC.st), stExecC.st); +} +``` + +More generally the statement +``` +forall x | P(x) { Lemma(x); } +``` +is used to invoke `Lemma(x)` on all `x` for which `P(x)` holds. If +`Lemma(x)` ensures `Q(x)`, then the forall statement establishes +``` +forall x :: P(x) ==> Q(x). +``` + +The `forall` statement is also used extensively in the desugared forms of +co-predicates and co-lemmas. See section [#sec-co-inductive-datatypes]. + +TODO: List all of the restrictions on the `forall` statement. + +## Modify Statement +```` +ModifyStmt = + "modify" { Attribute } + ( FrameExpression(allowLemma: false, allowLambda: true) + { "," FrameExpression(allowLemma: false, allowLambda: true) } + | "..." + ) + ( BlockStmt | ";" ) +```` + +The `modify` statement has two forms which have two different +purposes. 
+ +When the `modify` statement ends with a semi-colon rather than +a block statement its effect is to say that some undetermined +modifications have been made to any or all of the memory +locations specified by the [frame expressions](#sec-frame-expressions). +In the following example, a value is assigned to field `x` +followed by a `modify` statement that may modify any field +in the object. After that we can no longer prove that the field +`x` still has the value we assigned to it. + +``` +class MyClass { + var x: int; + method N() + modifies this + { + x := 18; + modify this; + assert x == 18; // error: cannot conclude this here + } +} +``` + +When the `modify` statement is followed by a block statement +we are instead specifying what can be modified in that +block statement. Namely, only memory locations specified +by the frame expressions of the block `modify` statement +may be modified. Consider the following example. + +``` +class ModifyBody { + var x: int; + var y: int; + method M0() + modifies this + { + modify {} { + x := 3; // error: violates modifies clause of the modify statement + } + } + method M1() + modifies this + { + modify {} { + var o := new ModifyBody; + o.x := 3; // fine + } + } + method M2() + modifies this + { + modify this { + x := 3; + } + } +} +``` + +The first `modify` statement in the example has an empty +frame expression so it cannot modify any memory locations. +So an error is reported when it tries to modify field `x`. + +The second `modify` statement also has an empty frame +expression. But it allocates a new object and modifies it. +Thus we see that the frame expressions on a block `modify` +statement only limits what may be modified of existing +memory. It does not limit what may be modified in +new memory that is allocated. + +The third `modify` statement has a frame expression that +allows it to modify any of the fields of the current object, +so the modification of field `x` is allowed. 
+ +## Calc Statement +```` +CalcStmt = "calc" { Attribute } [ CalcOp ] "{" CalcBody "}" +CalcBody = { CalcLine [ CalcOp ] Hints } +CalcLine = Expression(allowLemma: false, allowLambda: true) ";" +Hints = { ( BlockStmt | CalcStmt ) } +CalcOp = + ( "==" [ "#" "[" Expression(allowLemma: true, allowLambda: true) "]" ] + | "<" | ">" + | "!=" | "<=" | ">=" + | "<==>" | "==>" | "<==" + ) +```` + +The `calc` statement supports _calculational proofs_ using a language feature called _program-oriented calculations_ (poC). This feature was introduced and explained in the [Verified Calculations] paper by +Leino and Polikarpova[@LEINO:Dafny:Calc]. Please see that paper for a more complete explanation +of the `calc` statement. We here mention only the highlights. + +[Verified Calculations]: http://research.microsoft.com/en-us/um/people/leino/papers/krml231.pdf + +Calculational proofs are proofs by stepwise formula manipulation +as is taught in elementary algebra. The typical example is to prove +an equality by starting with a left-hand-side, and through a series of +transformations morph it into the desired right-hand-side. + +Non-syntactic rules further restrict hints to only ghost and side-effect +free statements, as well as impose a constraint that only +chain-compatible operators can be used together in a calculation. The +notion of chain-compatibility is quite intuitive for the operators +supported by poC; for example, it is clear that "<" and ">" cannot be used within +the same calculation, as there would be no relation to conclude between +the first and the last line. See the [paper][Verified Calculations] for +a more formal treatment of chain-compatibility. + +Note that we allow a single occurrence of the intransitive operator "!=" to +appear in a chain of equalities (that is, "!=" is chain-compatible with +equality but not with any other operator, including itself). Calculations +with fewer than two lines are allowed, but have no effect. 
If a step
+operator is omitted, it defaults to the calculation-wide operator,
+defined after the `calc` keyword. If that operator is omitted, it defaults
+to equality.
+
+Here is an example using `calc` statements to prove an elementary
+algebraic identity. As it turns out Dafny is able to prove this without
+the `calc` statements, but it helps to illustrate the syntax.
+
+```
+lemma docalc(x : int, y: int)
+  ensures (x + y) * (x + y) == x * x + 2 * x * y + y * y
+{
+  calc {
+    (x + y) * (x + y); ==
+      // distributive law: (a + b) * c == a * c + b * c
+    x * (x + y) + y * (x + y); ==
+      // distributive law: a * (b + c) == a * b + a * c
+    x * x + x * y + y * x + y * y; ==
+    calc {
+      y * x; ==
+      x * y;
+    }
+    x * x + x * y + x * y + y * y; ==
+    calc {
+      x * y + x * y; ==
+      // a = 1 * a
+      1 * x * y + 1 * x * y; ==
+      // Distributive law
+      (1 + 1) * x * y; ==
+      2 * x * y;
+    }
+    x * x + 2 * x * y + y * y;
+  }
+}
+```
+
+Here we started with `(x + y) * (x + y)` as the left-hand-side
+expression and gradually transformed it using distributive,
+commutative and other laws into the desired right-hand-side.
+
+The justification for the steps are given as comments, or as
+nested `calc` statements that prove equality of some sub-parts
+of the expression.
+
+The `==` to the right of the semicolons shows the relation between
+that expression and the next. Because of the transitivity of
+equality we can then conclude that the original left-hand-side is
+equal to the final expression.
+
+We can avoid having to supply the relational operator between
+every pair of expressions by giving a default operator between
+the `calc` keyword and the opening brace as shown in this abbreviated
+version of the above calc statement:
+
+```
+calc == {
+  (x + y) * (x + y);
+  x * (x + y) + y * (x + y);
+  x * x + x * y + y * x + y * y;
+  x * x + x * y + x * y + y * y;
+  x * x + 2 * x * y + y * y;
+}
+```
+
+And since equality is the default operator we could have omitted
+it after the `calc` keyword. 
+The purpose of the block statements or the `calc` statements between +the expressions is to provide hints to aid Dafny in proving that +step. As shown in the example, comments can also be used to aid +the human reader in cases where Dafny can prove the step automatically. + +## Skeleton Statement +```` +SkeletonStmt = + "..." + ["where" Ident {"," Ident } ":=" + Expression(allowLemma: false, allowLambda: true) + {"," Expression(allowLemma: false, allowLambda: true) } + ] ";" +```` + +# Expressions +The grammar of Dafny expressions follows a hierarchy that +reflects the precedence of Dafny operators. The following +table shows the Dafny operators and their precedence +in order of increasing binding power. + ++--------------------------+------------------------------------+ +| operator | description | ++--------------------------+------------------------------------+ +| `;` | In LemmaCall;Expression | ++--------------------------+------------------------------------+ +| `<==>`, ⇔ | equivalence (if and only if) | ++--------------------------+------------------------------------+ +| `==>`, ⇒ | implication (implies) | +| `<==`, ⇐ | reverse implication (follows from) | ++--------------------------+------------------------------------+ +| `&&`, ∧ | conjunction (and) | +| [\|\|]{.monospace}, ∨ | disjunction (or) | ++--------------------------+------------------------------------+ +| `!`, ¬ | negation (not) | ++--------------------------+------------------------------------+ +| `==` | equality | +| `==#[k]` | prefix equality (co-inductive) | +| `!=` | disequality | +| `!=#[k]` | prefix disequality (co-inductive) | +| [<]{.monospace} | less than | +| `<=` | at most | +| `>=` | at least | +| `>` | greater than | +| `in` | collection membership | +| `!in` | collection non-membership | +| `!!` | disjointness | ++--------------------------+------------------------------------+ +| `+` | addition (plus) | +| `-` | subtraction (minus) | 
++--------------------------+------------------------------------+ +| `*` | multiplication (times) | +| `/` | division (divided by) | +| `%` | modulus (mod) | ++--------------------------+------------------------------------+ +| `-` | arithmetic negation (unary minus) | +| `!`, ¬ | logical negation | +| Primary Expressions | | ++--------------------------+------------------------------------+ + +We are calling the ``UnaryExpression``s that are neither +arithmetic nor logical negation the _primary expressions_. +They are the most tightly bound. + +In the grammar entries below we explain the meaning when the +operator for that precedence level is present. If the +operator is not present then we just descend to the +next precedence level. + +## Top-level expressions +```` +Expression(allowLemma, allowLambda) = + EquivExpression(allowLemma, allowLambda) + [ ";" Expression(allowLemma, allowLambda) ] +```` + +The "allowLemma" argument says whether or not the expression +to be parsed is allowed to have the form S;E where S is a call to a lemma. +"allowLemma" should be passed in as "false" whenever the expression to +be parsed sits in a context that itself is terminated by a semi-colon. + +The "allowLambda" says whether or not the expression to be parsed is +allowed to be a lambda expression. More precisely, an identifier or +parenthesized-enclosed comma-delimited list of identifiers is allowed to +continue as a lambda expression (that is, continue with a "reads", "requires", +or "=>") only if "allowLambda" is true. This affects function/method/iterator +specifications, if/while statements with guarded alternatives, and expressions +in the specification of a lambda expression itself. + +Sometimes an expression will fail unless some relevant fact is known. +In the following example the `F_Fails` function fails to verify +because the `Fact(n)` divisor may be zero. 
But preceding +the expression by a lemma that ensures that the denominator +is not zero allows function `F_Succeeds` to succeed. +``` +function Fact(n: nat): nat +{ + if n == 0 then 1 else n * Fact(n-1) +} + +lemma L(n: nat) + ensures 1 <= Fact(n) +{ +} + +function F_Fails(n: nat): int +{ + 50 / Fact(n) // error: possible division by zero +} + +function F_Succeeds(n: nat): int +{ + L(n); + 50 / Fact(n) +} +``` + +## Equivalence Expressions +```` +EquivExpression(allowLemma, allowLambda) = + ImpliesExpliesExpression(allowLemma, allowLambda) + { "<==>" ImpliesExpliesExpression(allowLemma, allowLambda) } +```` +An ``EquivExpression`` that contains one or more "<==>"s is +a boolean expression and all the contained ``ImpliesExpliesExpression`` +must also be boolean expressions. In that case each "<==>" +operator tests for logical equality which is the same as +ordinary equality. + +See section [#sec-equivalence-operator] for an explanation of the +`<==>` operator as compared with the `==` operator. + +## Implies or Explies Expressions +```` +ImpliesExpliesExpression(allowLemma, allowLambda) = + LogicalExpression(allowLemma, allowLambda) + [ ( "==>" ImpliesExpression(allowLemma, allowLambda) + | "<==" LogicalExpression(allowLemma, allowLambda) + { "<==" LogicalExpression(allowLemma, allowLambda) } + ) + ] + +ImpliesExpression(allowLemma, allowLambda) = + LogicalExpression(allowLemma, allowLambda) + [ "==>" ImpliesExpression(allowLemma, allowLambda) ] +```` + +See section [#sec-implication-and-reverse-implication] for an explanation +of the `==>` and `<==` operators. 
+ +## Logical Expressions + +```` +LogicalExpression(allowLemma, allowLambda) = + RelationalExpression(allowLemma, allowLambda) + [ ( "&&" RelationalExpression(allowLemma, allowLambda) + { "&&" RelationalExpression(allowLemma, allowLambda) } + | "||" RelationalExpression(allowLemma, allowLambda) + { "||" RelationalExpression(allowLemma, allowLambda) } + ) + ] +```` + +See section [#sec-conjunction-and-disjunction] for an explanation +of the `&&` (or ∧) and `||` (or ∨) operators. + +## Relational Expressions +```` +RelationalExpression(allowLemma, allowLambda) = + Term(allowLemma, allowLambda) + [ RelOp Term(allowLemma, allowLambda) + { RelOp Term(allowLemma, allowLambda) } ] + +RelOp = + ( "==" [ "#" "[" Expression(allowLemma: true, allowLambda: true) "]" ] + | "<" | ">" | "<=" | ">=" + | "!=" [ "#" "[" Expression(allowLemma: true, allowLambda: true) "]" ] + | "in" + | "!in" + | "!!" + ) + +```` + +The relation expressions that have a ``RelOp`` compare two or more terms. +As explained in section [#sec-basic-types], `==`, `!=`, ``<``, `>`, `<=`, and `>=` +and their corresponding Unicode equivalents are _chaining_. + +The `in` and `!in` operators apply to collection types as explained in +section [#sec-collection-types] and represent membership or non-membership +respectively. + +The `!!` represents disjointness for sets and multisets as explained in +sections [#sec-sets] and [#sec-multisets]. + +Note that `x ==#[k] y` is the prefix equality operator that compares +co-inductive values for equality to a nesting level of k, as +explained in section [#sec-co-equality]. + +## Terms +```` +Term(allowLemma, allowLambda) = + Factor(allowLemma, allowLambda) + { AddOp Factor(allowLemma, allowLambda) } +AddOp = ( "+" | "-" ) +```` + +`Terms` combine `Factors` by adding or subtracting. +Addition has these meanings for different types: + +* Arithmetic addition for numeric types (section [#sec-numeric-types]). 
+* Union for sets and multisets (sections [#sec-sets] and [#sec-multisets]) +* Concatenation for sequences (section [#sec-sequences]) + +Subtraction is arithmetic subtraction for numeric types, and set or multiset +difference for sets and multisets. + +## Factors +```` +Factor(allowLemma, allowLambda) = + UnaryExpression(allowLemma, allowLambda) + { MulOp UnaryExpression(allowLemma, allowLambda) } +MulOp = ( "*" | "/" | "%" ) +```` + +A ``Factor`` combines ``UnaryExpression``s using multiplication, +division, or modulus. For numeric types these are explained in +section [#sec-numeric-types]. + +Only `*` has a non-numeric application. It represents set or multiset +intersection as explained in sections [#sec-sets] and [#sec-multisets]. + +## Unary Expressions + +```` +UnaryExpression(allowLemma, allowLambda) = + ( "-" UnaryExpression(allowLemma, allowLambda) + | "!" UnaryExpression(allowLemma, allowLambda) + | PrimaryExpression_(allowLemma, allowLambda) + ) + +```` + +A ``UnaryExpression`` applies either numeric (section [#sec-numeric-types]) +or logical (section [#sec-booleans]) negation to its operand. + +## Primary Expressions + +```` +PrimaryExpression_(allowLemma, allowLambda) = + ( MapDisplayExpr { Suffix } + | LambdaExpression(allowLemma) + | EndlessExpression(allowLemma, allowLambda) + | NameSegment { Suffix } + | SeqDisplayExpr { Suffix } + | SetDisplayExpr { Suffix } + | MultiSetExpr { Suffix } + | ConstAtomExpression { Suffix } + ) + +```` + +After descending through all the binary and unary operators we arrive at +the primary expressions which are explained in subsequent sections. As +can be seen, a number of these can be followed by 0 or more ``Suffix``es +to select a component of the value. + +If the `allowLambda` is false then ``LambdaExpression``s are not +recognized in this context. 
+ +## Lambda expressions +```` +LambdaExpression(allowLemma) = + ( WildIdent + | "(" [ IdentTypeOptional { "," IdentTypeOptional } ] ")" + ) + LambdaSpec_ + LambdaArrow Expression(allowLemma, allowLambda: true) + +LambdaArrow = ( "=>" | "->" ) +```` + +See section [#sec-lambda-specification] for a description of ``LambdaSpec``. + +In addition to named functions, Dafny supports expressions that define +functions. These are called _lambda (expression)s_ (some languages +know them as _anonymous functions_). A lambda expression has the +form: +``` +(\(_params_\)) \(_specification_\) => \(_body_\) +``` +where `\(_params_\)` is a comma-delimited list of parameter +declarations, each of which has the form `x` or `x: T`. The type `T` +of a parameter can be omitted when it can be inferred. If the +identifier `x` is not needed, it can be replaced by "`_`". If +`\(_params_\)` consists of a single parameter `x` (or `_`) without an +explicit type, then the parentheses can be dropped; for example, the +function that returns the successor of a given integer can be written +as the following lambda expression: +``` +x => x + 1 +``` + +The `\(_specification_\)` is a list of clauses `requires E` or +`reads W`, where `E` is a boolean expression and `W` is a frame +expression. + +`\(_body_\)` is an expression that defines the function's return +value. The body must be well-formed for all possible values of the +parameters that satisfy the precondition (just like the bodies of +named functions and methods). In some cases, this means it is +necessary to write explicit `requires` and `reads` clauses. For +example, the lambda expression +``` +x requires x != 0 => 100 / x +``` +would not be well-formed if the `requires` clause were omitted, +because of the possibility of division-by-zero. 
+ +In settings where functions cannot be partial and there are no +restrictions on reading the heap, the _eta expansion_ of a function +`F: T -> U` (that is, the wrapping of `F` inside a lambda expression +in such a way that the lambda expression is equivalent to `F`) would +be written `x => F(x)`. In Dafny, eta expansion must also account for +the precondition and reads set of the function, so the eta expansion +of `F` looks like: +``` +x requires F.requires(x) reads F.reads(x) => F(x) +``` + +## Left-Hand-Side Expressions +```` +Lhs = + ( NameSegment { Suffix } + | ConstAtomExpression Suffix { Suffix } + ) +```` + +A left-hand-side expression is only used on the left hand +side of an ``UpdateStmt``. + +TODO: Try to give examples showing how these kinds of +left-hand-sides are possible. + +## Right-Hand-Side Expressions +```` +Rhs = + ( ArrayAllocation_ + | ObjectAllocation_ + | Expression(allowLemma: false, allowLambda: true) + | HavocRhs_ + ) + { Attribute } +```` + +An ``Rhs`` is either array allocation, an object allocation, +an expression, or a havoc right-hand-side, optionally followed +by one or more ``Attribute``s. + +Right-hand-side expressions appear in the following constructs: +``ReturnStmt``, ``YieldStmt``, ``UpdateStmt``, or ``VarDeclStatement``. +These are the only contexts in which arrays or objects may be +allocated, or in which havoc may be produced. + +## Array Allocation +```` +ArrayAllocation_ = "new" Type "[" Expressions "]" +```` + +This allocates a new single or multi-dimensional array as explained in +section [#sec-array-types]. + +## Object Allocation +```` +ObjectAllocation_ = "new" Type [ "(" [ Expressions ] ")" ] +```` + +This allocated a new object of a class type as explained +in section [#sec-class-types]. + +## Havoc Right-Hand-Side +```` +HavocRhs_ = "*" +```` +A havoc right-hand-side produces an arbitrary value of its associated +type. To get a more constrained arbitrary value the "assign-such-that" +operator (`:|`) can be used. 
See section [#sec-update-statement]. + +## Constant Or Atomic Expressions +```` +ConstAtomExpression = + ( LiteralExpression_ + | FreshExpression_ + | OldExpression_ + | CardinalityExpression_ + | NumericConversionExpression_ + | ParensExpression + ) +```` +A ``ConstAtomExpression`` represent either a constant of some type, or an +atomic expression. A ``ConstAtomExpression`` is never an l-value. Also, a +``ConstAtomExpression`` is never followed by an open parenthesis (but could +very well have a suffix that starts with a period or a square bracket). +(The "Also..." part may change if expressions in Dafny could yield +functions.) + +## Literal Expressions +```` +LiteralExpression_ = + ( "false" | "true" | "null" | Nat | Dec | + charToken | stringToken | "this") +```` +A literal expression is a boolean literal, a null object reference, +an unsigned integer or real literal, a character or string literal, +or "this" which denote the current object in the context of +an instance method or function. + +## Fresh Expressions +```` +FreshExpression_ = "fresh" "(" Expression(allowLemma: true, allowLambda: true) ")" +```` + +`fresh(e)` returns a boolean value that is true if +the objects referenced in expression `e` were all +freshly allocated in the current method invocation. +The argument of `fresh` must be either an object reference +or a collection of object references. + +## Old Expressions +```` +OldExpression_ = "old" "(" Expression(allowLemma: true, allowLambda: true) ")" +```` + +An _old expression_ is used in postconditions. `old(e)` evaluates to +the value expression `e` had on entry to the current method. + +## Cardinality Expressions +```` +CardinalityExpression_ = "|" Expression(allowLemma: true, allowLambda: true) "|" +```` + +For a collection expression `c`, `|c|` is the cardinality of `c`. For a +set or sequence the cardinality is the number of elements. For +a multiset the cardinality is the sum of the multiplicities of the +elements. 
For a map the cardinality is the cardinality of the
+domain of the map. Cardinality is not defined for infinite maps.
+For more see section [#sec-collection-types].
+
+## Numeric Conversion Expressions
+````
+NumericConversionExpression_ =
+  ( "int" | "real" ) "(" Expression(allowLemma: true, allowLambda: true) ")"
+````
+Numeric conversion expressions give the name of the target type
+followed by the expression being converted in parentheses.
+This production is for `int` and `real` as the target types
+but this also applies more generally to other numeric types,
+e.g. `newtypes`. See section [#sec-numeric-conversion-operations].
+
+## Parenthesized Expression
+````
+ParensExpression =
+  "(" [ Expressions ] ")"
+````
+A ``ParensExpression`` is a list of zero or more expressions
+enclosed in parentheses.
+
+If there is exactly one expression enclosed then the value is just
+the value of that expression.
+
+If there are zero or more than one the result is a `tuple` value.
+See section [#sec-tuple-types].
+
+## Sequence Display Expression
+````
+SeqDisplayExpr = "[" [ Expressions ] "]"
+````
+A sequence display expression provides a way to construct
+a sequence with given values. For example
+
+```
+[1, 2, 3]
+```
+is a sequence with three elements in it.
+See section [#sec-sequences] for more information on
+sequences.
+
+## Set Display Expression
+````
+SetDisplayExpr = [ "iset" ] "{" [ Expressions ] "}"
+````
+
+A set display expression provides a way to construct
+a set with given elements. If the keyword `iset` is present
+then a potentially infinite set is constructed.
+
+For example
+
+```
+{1, 2, 3}
+```
+is a set with three elements in it.
+See section [#sec-sets] for more information on
+sets. 
+ +## Multiset Display or Cast Expression +```` +MultiSetExpr = + "multiset" + ( "{" [ Expressions ] "}" + | "(" Expression(allowLemma: true, allowLambda: true) ")" + ) +```` + +A multiset display expression provide a way to constructing +a multiset with given elements and multiplicity. For example + +``` +multiset{1, 1, 2, 3} +``` +is a multiset with three elements in it. The number 1 has a multiplicity of 2, +the others a multiplicity of 1. + +On the other hand, a multiset cast expression converts a set or a sequence +into a multiset as shown here: + +``` +var s : set := {1, 2, 3}; +var ms : multiset := multiset(s); +ms := ms + multiset{1}; +var sq : seq := [1, 1, 2, 3]; +var ms2 : multiset := multiset(sq); +assert ms == ms2; +``` + +See section [#sec-multisets] for more information on +multisets. + +## Map Display Expression +```` +MapDisplayExpr = ("map" | "imap" ) "[" [ MapLiteralExpressions ] "]" +MapLiteralExpressions = + Expression(allowLemma: true, allowLambda: true) + ":=" Expression(allowLemma: true, allowLambda: true) + { "," Expression(allowLemma: true, allowLambda: true) + ":=" Expression(allowLemma: true, allowLambda: true) + } +```` + +A map display expression builds a finite or potentially infinite +map from explicit ``MapLiteralExpressions``. For example: + +``` +var m := map[1 := "a", 2 := "b"]; +ghost var im := imap[1 := "a", 2 := "b"]; +``` + +Note that `imap`s may only appear in ghost contexts. See +section [#sec-finite-and-infinite-maps] for more details on maps and imaps. 
+ +## Endless Expression +```` +EndlessExpression(allowLemma, allowLambda) = + ( IfExpression_(allowLemma, allowLambda) + | MatchExpression(allowLemma, allowLambda) + | QuantifierExpression(allowLemma, allowLambda) + | SetComprehensionExpr(allowLemma, allowLambda) + | StmtInExpr Expression(allowLemma, allowLambda) + | LetExpr(allowLemma, allowLambda) + | MapComprehensionExpr(allowLemma, allowLambda) + ) +```` + + +``EndlessExpression`` gets it name from the fact that all its alternate +productions have no terminating symbol to end them, but rather they +all end with an ``Expression`` at the end. The various +``EndlessExpression`` alternatives are described below. + +## If Expression +```` +IfExpression_(allowLemma, allowLambda) = + "if" Expression(allowLemma: true, allowLambda: true) + "then" Expression(allowLemma: true, allowLambda: true) + "else" Expression(allowLemma, allowLambda) +```` + +The ``IfExpression`` is a conditional expression. It first evaluates +the expression following the `if`. If it evaluates to `true` then +it evaluates the expression following the `then` and that is the +result of the expression. If it evaluates to `false` then the +expression following the `else` is evaluated and that is the result +of the expression. It is important that only the selected expression +is evaluated as the following example shows. + +``` +var k := 10 / x; // error, may divide by 0. +var m := if x != 0 then 10 / x else 1; // ok, guarded +``` + +## Case Bindings and Patterns +```` +CaseBinding_ = + "case" + ( Ident [ "(" CasePattern { "," CasePattern } ")" ] + | "(" CasePattern { "," CasePattern } ")" + ) + +CasePattern = + ( Ident "(" [ CasePattern { "," CasePattern } ] ")" + | "(" [ CasePattern { "," Casepattern } ] ")" + | IdentTypeOptional + ) +```` + +Case bindings and patterns are used for (possibly nested) +pattern matching on inductive or coinductive values. +The ``CaseBinding_`` construct is used in +``CaseStatement`` and ``CaseExpression``s. 
+Besides its use in ``CaseBinding_``, ``CasePattern``s are used +in ``LetExpr``s and ``VarDeclStatement``s. + +When matching an inductive or coinductive value in +a ``MatchStmt`` or ``MatchExpression``, there must be +a ``CaseBinding_`` for each constructor. A tuple is +considered to have a single constructor. +The ``Ident`` of the ``CaseBinding_`` must match the name +of a constructor (or in the case of a tuple the ``Ident`` is +absent and the second alternative is chosen). +The ``CasePattern``s inside the parenthesis are then +matched against the argument that were given to the +constructor when the value was constructed. +The number of ``CasePattern``s must match the number +of parameters to the constructor (or the arity of the +tuple). + +The ``CasePattern``s may be nested. The set of non-constructor-name +identifiers contained in a ``CaseBinding_`` must be distinct. +They are bound to the corresponding values in the value being +matched. + +## Match Expression + +```` +MatchExpression(allowLemma, allowLambda) = + "match" Expression(allowLemma, allowLambda) + ( "{" { CaseExpression(allowLemma: true, allowLambda: true) } "}" + | { CaseExpression(allowLemma, allowLambda) } + ) + +CaseExpression(allowLemma, allowLambda) = + CaseBinding_ "=>" Expression(allowLemma, allowLambda) +```` + +A ``MatchExpression`` is used to conditionally evaluate and select an +expression depending on the value of an algebraic type, i.e. an inductive +type, or a co-inductive type. + +The ``Expression`` following the `match` keyword is called the +_selector_. There must be a ``CaseExpression`` for each constructor of +the type of the selector. The ``Ident`` following the `case` keyword in a +``CaseExpression`` is the name of a constructor of the selector's type. +It may be absent if the expression being matched is a tuple since these +have no constructor name. 
+ +If the constructor has parameters then in the ``CaseExpression`` the +constructor name must be followed by a parenthesized list of ``CasePattern``s. +If the constructor has no parameters then the +``CaseExpression`` must not have a following ``CasePattern`` list. +All of the identifiers in the ``CasePattern``s must be distinct. +If types for the identifiers are not given then types are inferred +from the types of the constructor's parameters. If types are +given then they must agree with the types of the +corresponding parameters. + +A ``MatchExpression`` is evaluated by first evaluating the selector. +Then the ``CaseClause`` is selected for the constructor that was +used to construct the evaluated selector. If the constructor had +parameters then the actual values used to construct the selector +value are bound to the identifiers in the identifier list. +The expression to the right of the `=>` in the ``CaseClause`` is then +evaluated in the environment enriched by this binding. The result +of that evaluation is the result of the ``MatchExpression``. + +Note that the braces enclosing the ``CaseClause``s may be omitted. + +## Quantifier Expression +```` +QuantifierExpression(allowLemma, allowLambda) = + ( "forall" | "exists" ) QuantifierDomain "::" + Expression(allowLemma, allowLambda) + +QuantifierDomain = + IdentTypeOptional { "," IdentTypeOptional } { Attribute } + [ "|" Expression(allowLemma: true, allowLambda: true) ] +```` + +A ``QuantifierExpression`` is a boolean expression that specifies that a +given expression (the one following the "::") is true for all (for +**forall**) or some (for **exists**) combination of values of the +quantified variables, namely those in the ``QuantifierDomain``. 
+ +Here are some examples: +``` +assert forall x : nat | x <= 5 :: x * x <= 25; +(forall n :: 2 <= n ==> (exists d :: n < d && d < 2*n)) +``` + +or using the Unicode symbols: + +``` +assert \(∀\) x : nat | x <= 5 \(•\) x * x <= 25; +(\(∀\) n \(•\) 2 <= n ==> (\(∃\) d \(•\) n < d && d < 2*n)) +``` + +The quantifier identifiers are _bound_ within the scope of the +expressions in the ``QuantifierExpression``. + +It types are not given for the quantified identifiers then Dafny +attempts to infer their types from the context of the expressions. +It this is not possible the program is in error. + + +## Set Comprehension Expressions +```` +SetComprehensionExpr(allowLemma, allowLambda) = + [ "set" | "iset" ] + IdentTypeOptional { "," IdentTypeOptional } { Attribute } + "|" Expression(allowLemma, allowLambda) + [ "::" Expression(allowLemma, allowLambda) ] +```` + +A set comprehension expression is an expressions that yields a set +(possibly infinite if `iset` is used) that +satisfies specified conditions. There are two basic forms. + +If there is only one quantified variable the optional ``"::" Expression`` +need not be supplied, in which case it is as if it had been supplied +and the expression consists solely of the quantified variable. +That is, + +``` +set x : T | P(x) +``` + +is equivalent to + +``` +set x : T | P(x) :: x +``` + +For the full form + +``` +var S := set x1:T1, x2:T2 ... | P(x1, x2, ...) :: Q(x1, x2, ...) +``` + +the elements of `S` will be all values resulting from evaluation of `Q(x1, x2, ...)` +for all combinations of quantified variables `x1, x2, ...` such that +predicate `P(x1, x2, ...)` holds. For example, + +``` +var S := set x:nat, y:nat | x < 2 && y < 2 :: (x, y) +``` +would yield `S == {(0, 0), (0, 1), (1, 0), (1,1) }` + +The types on the quantified variables are optional and if not given Dafny +will attempt to infer them from the contexts in which they are used in the +`P` or `Q` expressions. 
+ +If a finite set was specified ("set" keyword used), Dafny must be able to prove that the +result is finite otherwise the set comprehension expression will not be +accepted. + +Set comprehensions involving reference types such as + +``` +set o: object | true +``` + +are allowed in ghost contexts. In particular, in ghost contexts, the +check that the result is finite should allow any set comprehension +where the bound variable is of a reference type. In non-ghost contexts, +it is not allowed, because--even though the resulting set would be +finite--it is not pleasant or practical to compute at run time. + +## Statements in an Expression +```` +StmtInExpr = ( AssertStmt | AssumeStmt | CalcStmt ) +```` + +A ``StmtInExpr`` is a kind of statement that is allowed to +precede an expression in order to ensure that the expression +can be evaluated without error. For example: + +``` +assume x != 0; 10/x +``` + +`Assert`, `assume` and `calc` statements can be used in this way. + +## Let Expression + +```` +LetExpr(allowLemma, allowLambda) = + [ "ghost" ] "var" CasePattern { "," CasePattern } + ( ":=" | { Attribute } ":|" ) + Expression(allowLemma: false, allowLambda: true) + { "," Expression(allowLemma: false, allowLambda: true) } ";" + Expression(allowLemma, allowLambda) +```` + +A `let` expression allows binding of intermediate values to identifiers +for use in an expression. The start of the `let` expression is +signaled by the `var` keyword. They look much like a local variable +declaration except the scope of the variable only extends to the +enclosed expression. + +For example: +``` +var sum := x + y; sum * sum +``` + +In the simple case the ``CasePattern`` is just an identifier with optional +type (which if missing is inferred from the rhs). + +The more complex case allows destructuring of constructor expressions. +For example: + +``` +datatype Stuff = SCons(x: int, y: int) | Other +function GhostF(z: Stuff): int + requires z.SCons? 
+{ + var SCons(u, v) := z; var sum := u + v; sum * sum +} +``` + +## Map Comprehension Expression +```` +MapComprehensionExpr(allowLemma, allowLambda) = + ( "map" | "imap" ) IdentTypeOptional { Attribute } + [ "|" Expression(allowLemma: true, allowLambda: true) ] + "::" Expression(allowLemma, allowLambda) +```` + +A ``MapComprehensionExpr`` defines a finite or infinite map value +by defining a domain (using the ``IdentTypeOptional`` and the optional +condition following the "|") and for each value in the domain, +giving the mapped value using the expression following the "::". + +For example: +``` +function square(x : int) : int { x * x } +method test() +{ + var m := map x : int | 0 <= x <= 10 :: x * x; + ghost var im := imap x : int :: x * x; + ghost var im2 := imap x : int :: square(x); +} +``` + +Dafny maps must be finite, so the domain must be constrained to be finite. +But imaps may be infinite as the example shows. The last example shows +creation of an infinite map that gives the same results as a function. + + + +## Name Segment +```` +NameSegment = Ident [ GenericInstantiation | HashCall ] +```` + +A ``NameSegment`` names a Dafny entity by giving its declared +name optionally followed by information to +make the name more complete. For the simple case it is +just an identifier. + +If the identifier is for a generic entity it is followed by +a ``GenericInstantiation`` which provides actual types for +the type parameters. + +To reference a prefix predicate (see section [#sec-copredicates]) or +prefix lemma (see section [#sec-prefix-lemmas]), the identifier +must be the name of the copredicate or colemma and it must be +followed by a ``HashCall``. + +## Hash Call +```` +HashCall = "#" [ GenericInstantiation ] + "[" Expression(allowLemma: true, allowLambda: true) "]" + "(" [ Expressions ] ")" +```` +A ``HashCall`` is used to call the prefix for a copredicate or colemma. 
+In the non-generic case it just inserts `"#[k]"` before the call argument
+ +### Datatype Update Suffix + +```` +DatatypeUpdateSuffix_ = + "." "(" MemberBindingUpdate { "," MemberBindingUpdate } ")" + +MemberBindingUpdate = + ( ident | digits ) ":=" Expression(allowLemma: true, allowLambda: true) +```` + +A datatype update suffix is used to produce a new datatype value +that is the same as an old datatype value except that the +value corresponding to a given destructor has the specified value. +In a ``MemberBindingUpdate``, the ``ident`` or ``digits`` is the +name of a destructor (i.e. formal parameter name) for one of the +constructors of the datatype. The expression to the right of the +":=" is the new value for that formal. + +All of the destructors in a ``DatatypeUpdateSuffix_`` must be +for the same constructor, and if they do not cover all of the +destructors for that constructor then the datatype value being +updated must have a value derived from that same constructor. + +Here is an example: + +``` +module NewSyntax { +datatype MyDataType = MyConstructor(myint:int, mybool:bool) + | MyOtherConstructor(otherbool:bool) + | MyNumericConstructor(42:int) + +method test(datum:MyDataType, x:int) + returns (abc:MyDataType, def:MyDataType, ghi:MyDataType, jkl:MyDataType) + requires datum.MyConstructor?; + ensures abc == datum.(myint := x + 2); + ensures def == datum.(otherbool := !datum.mybool); + ensures ghi == datum.(myint := 2).(mybool := false); + // Resolution error: no non_destructor in MyDataType + //ensures jkl == datum.(non_destructor := 5); + ensures jkl == datum.(42 := 7); +{ + abc := MyConstructor(x + 2, datum.mybool); + abc := datum.(myint := x + 2); + def := MyOtherConstructor(!datum.mybool); + ghi := MyConstructor(2, false); + jkl := datum.(42 := 7); + + assert abc.(myint := abc.myint - 2) == datum.(myint := x); +} +} +``` + + + +### Subsequence Suffix +```` +SubsequenceSuffix_ = + "[" [ Expression(allowLemma: true, allowLambda: true) ] + ".." 
[ Expression(allowLemma: true, allowLambda: true) ] + "]" +```` +A subsequence suffix applied to a sequence produces a new sequence whose +elements are taken from a contiguous part of the original sequence. For +example, expression `s[lo..hi]` for sequence `s`, and integer-based +numerics `lo` and `hi` satisfying `0 <= lo <= hi <= |s|`. See +section [#sec-other-sequence-expressions] for details. + +### Slices By Length Suffix +```` +SlicesByLengthSuffix_ = + "[" Expression(allowLemma: true, allowLambda: true) + ":" Expression(allowLemma: true, allowLambda: true) + { ":" Expression(allowLemma: true, allowLambda: true) } + [ ":" ] + "]" +```` + +Applying a ``SlicesByLengthSuffix_`` to a sequence produces a +sequence of subsequences of the original sequence. +See section [#sec-other-sequence-expressions] for details. + +### Sequence Update Suffix +```` +SequenceUpdateSuffix_ = + "[" Expression(allowLemma: true, allowLambda: true) + ":=" Expression(allowLemma: true, allowLambda: true) + "]" +```` + +For a sequence `s` and expressions `i` and `v`, the expression +`s[i := v]` is the same as the sequence `s` except that at +index `i` it has value `v`. + +### Selection Suffix +```` +SelectionSuffix_ = + "[" Expression(allowLemma: true, allowLambda: true) + { "," Expression(allowLemma: true, allowLambda: true) } + "]" +```` + +If a ``SelectionSuffix_`` has only one expression in it, it is a +zero-based index that may be used to select a single element of a +sequence or from a single-dimensional array. + +If a ``SelectionSuffix_`` has more than one expression in it, then +it is a list of indices to index into a multi-dimensional array. +The rank of the array must be the same as the number of indices. + +### Argument List Suffix +```` +ArgumentListSuffix_ = "(" [ Expressions ] ")" +```` + +An argument list suffix is a parenthesized list of expressions that +are the arguments to pass to a method or function that is being +called. 
Applying such a suffix causes the method or function
For some attributes the +setting is only looked for on the entity of interest. For others we start +at the entity and if the attribute is not there, look up in the hierarchy +(enclosing class and enclosing modules). The latter case is checked by +the ContainsBoolAtAnyLevel method in the Dafny source. The attribute +declaration closest to the entity overrides those further away. + +For attributes with a single boolean expression argument, the attribute +with no argument is interpreted as if it were true. + +The attributes that are processed specially by Dafny are described in the +following sections. + +### assumption +This attribute can only be placed on a local ghost bool +variable of a method. Its declaration cannot have a rhs, but it is +allowed to participate as the lhs of exactly one assignment of the +form: `b := b && expr;`. Such a variable declaration translates in the +Boogie output to a declaration followed by an `assume b` command. TODO: +What is the motivation for this? + +### autoReq boolExpr +For a function declaration, if this attribute is set true at the nearest +level, then its `requires` clause is strengthed sufficiently so that +it may call the functions that it calls. + +For following example +``` +function f(x:int) : bool + requires x > 3 +{ + x > 7 +} + +// Should succeed thanks to auto_reqs +function {:autoReq} g(y:int, b:bool) : bool +{ + if b then f(y + 2) else f(2*y) +} +``` +the `{:autoReq}` attribute causes Dafny to +deduce a `requires` clause for g as if it had been +declared +``` +function g(y:int, b:bool) : bool + requires if b then y + 2 > 3 else 2 * y > 3 +{ + if b then f(y + 2) else f(2*y) +} +``` + +### autocontracts +Dynamic frames [@Kassios:FM2006;@SmansEtAl:VeriCool;@SmansEtAl:ImplicitDynamicFrames; +@LEINO:Dafny:DynamicFrames] +are frame expressions that can vary dynamically during +program execution. AutoContracts is an experimental feature that will +fill much of the dynamic-frames boilerplate into a class. 
+ +From the user's perspective, what needs to be done is simply: + +* mark the class with {:autocontracts} +* declare a function (or predicate) called Valid() + + +AutoContracts will then: + +* Declare: +``` + ghost var Repr: set(object); +``` + +* For function/predicate Valid(), insert: +``` + reads this, Repr +``` +* Into body of Valid(), insert (at the beginning of the body): +``` + this in Repr && null !in Repr +``` +* and also insert, for every array-valued field A declared in the class: +``` + (A != null ==> A in Repr) && +``` +* and for every field F of a class type T where T has a field called Repr, also insert: +``` + (F != null ==> F in Repr && F.Repr SUBSET Repr && this !in Repr) +``` +* Except, if A or F is declared with {:autocontracts false}, then the implication will not +be added. + +* For every constructor, add: +``` + modifies this + ensures Valid() && fresh(Repr - {this}) +``` +* At the end of the body of the constructor, add: +``` + Repr := {this}; + if (A != null) { Repr := Repr + {A}; } + if (F != null) { Repr := Repr + {F} + F.Repr; } +``` +* For every method, add: + +``` + requires Valid() + modifies Repr + ensures Valid() && fresh(Repr - old(Repr)) +``` +* At the end of the body of the method, add: +``` + if (A != null) { Repr := Repr + {A}; } + if (F != null) { Repr := Repr + {F} + F.Repr; } +``` + +### axiom +The `{:axiom}` attribute may be placed on a function or method. +It means that the post-condition may be assumed to be true +without proof. In that case also the body of the function or +method may be omitted. + +The `{:axiom}` attribute is also used for generated `reveal_*` +lemmas as shown in Section [#sec-opaque]. + +### compile +The `{:compile}` attribute takes a boolean argument. It may be applied to +any top-level declaration. If that argument is false then that declaration +will not be compiled into .Net code. + +### decl +The `{:decl}` attribute may be placed on a method declaration. 
It +inhibits the error message that has would be given when the method has a +`ensures` clauses but no body. + +TODO: There are no examples of this in the Dafny tests. What is the motivation +for this? + +### fuel +The fuel attributes is used to specify how much "fuel" a function should have, +i.e., how many times Z3 is permitted to unfold it's definition. The +new {:fuel} annotation can be added to the function itself, it which +case it will apply to all uses of that function, or it can overridden +within the scope of a module, function, method, iterator, calc, forall, +while, assert, or assume. The general format is: + +``` +{:fuel functionName,lowFuel,highFuel} +``` + +When applied as an annotation to the function itself, omit +functionName. If highFuel is omitted, it defaults to lowFuel + 1. + +The default fuel setting for recursive functions is 1,2. Setting the +fuel higher, say, to 3,4, will give more unfoldings, which may make +some proofs go through with less programmer assistance (e.g., with +fewer assert statements), but it may also increase verification time, +so use it with care. Setting the fuel to 0,0 is similar to making the +definition opaque, except when used with all literal arguments. + +### heapQuantifier +The `{:heapQuantifier}` attribute may be used on a ``QuantifierExpression``. +When it appears in a quantifier expression it is as if a new heap-valued +quantifier variable was added to the quantification. Consider this code +that is one of the invariants of a while loop. 
+ +``` +invariant forall u {:heapQuantifier} :: f(u) == u + r +``` + +The quantifier is translated into the following Boogie: + +``` +(forall q$heap#8: Heap, u#5: int :: + {:heapQuantifier} + $IsGoodHeap(q$heap#8) && ($Heap == q$heap#8 || $HeapSucc($Heap, q$heap#8)) + ==> $Unbox(Apply1(TInt, TInt, f#0, q$heap#8, $Box(u#5))): int == u#5 + r#0); +``` + +What this is saying is that the quantified expression, `f(u) == u + r`, +which may depend on the heap, is also valid for any good heap that is either the +same as the current heap, or that is derived from it by heap update operations. + +TODO: I think this means that the quantified expression is actually independent of the +heap. Is that true? + +### imported +If a ``MethodDecl`` or ``FunctionDecl`` has an `{:imported}` attribute, +then it is allowed to have a empty body even though it has an **ensures** +clause. Ordinarily a body would be required in order to provide the +proof of the **ensures** clause (but the `(:axiom)` attribute also +provides this facility, so the need for `(:imported)` is not clear.) +A method or function declaration may be given the `(:imported)` attribute. This suppresses +the error message that would be given if a method or function with an `ensures` clause +does not have a body. + +TODO: When would this be used? An example would be helpful. + +TODO: When is this useful or valid? + +### induction +The `{:induction}` attribute controls the application of +proof by induction to two contexts. Given a list of +variables on which induction might be applied, the +`{:induction}` attribute selects a sub-list of those +variables (in the same order) to which to apply induction. + +TODO: Would there be any advantage to taking the order +from the attribute, rather than preserving the original +order? That would seem to give the user more control. + +The two contexts are: + +* A method, in which case the bound variables are all the + in-parameters of the method. 
+* A quantifier expression, in which case the bound variables + are the bound variables of the quantifier expression. + +The form of the `{:induction}` attribute is one of the following: + +* `{:induction}` -- apply induction to all bound variables +* `{:induction false}` -- suppress induction, that is, don't apply it to any bound variable +* `{:induction L}` where `L` is a list consisting entirely of bound variables +-- apply induction to the specified bound variables +* `{:induction X}` where `X` is anything else -- treat the same as +{:induction}, that is, apply induction to all bound variables. For this +usage conventionally `X` is `true`. + +Here is an example of using it on a quantifier expression: +``` +ghost method Fill_J(s: seq) + requires forall i :: 1 <= i < |s| ==> s[i-1] <= s[i] + ensures forall i,j {:induction j} :: 0 <= i < j < |s| ==> s[i] <= s[j] +{ +} +``` + +### layerQuantifier +When Dafny is translating a quantified expression, if it has +a `{:layerQuantifier}` attribute an additional quantifier +variable is added to the quantifier bound variables. +This variable as the predefined _LayerType_. +A `{:layerQuantifier}` attribute may be placed on a quantifier expression. +Translation of Dafny into Boogie defines a _LayerType_ which has defined zero and +successor constructors. + +The Dafny source has the comment that "if a function is recursive, +then make the reveal lemma quantifier a layerQuantifier." +And in that case it adds the attribute to the quantifier. + +There is no explicit user of the `{:layerQuantifier}` attribute +in the Dafny tests. So I believe this attribute is only used +internally by Dafny and not externally. + +TODO: Need more complete explanation of this attribute. + +### nativeType {#sec-nativetype} +The `{:nativeType}` attribute may only be used on a ``NewtypeDecl`` +where the base type is an integral type. 
It can take one of the following +forms: + +* `{:nativeType}` - With no parameters it has no effect and the ``NewtypeDecl`` +have its default behavior which is to choose a native type that can hold any +value satisfying the constraints, if possible, otherwise BigInteger is used. +* `{:nativeType true}` - Also gives default ``NewtypeDecl`` behavior, +but gives an error if base type is not integral. +* `{:nativeType false}` - Inhibits using a native type. BigInteger is used +for integral types and BitRational for real types. +* `{:nativeType "typename"}` - This form has an native integral +type name as a string literal. Acceptable values are: "byte", +"sbyte", "ushort", "short", "uint", "int", "ulong" and "long". +An error is reported if the given data type cannot hold all the +values that satisfy the constraint. + + +### opaque {#sec-opaque} +Ordinarily the body of a function is transparent to its users but +sometimes it is useful to hide it. If a function `f` is given the +`{:opaque}` attribute then Dafny hides the body of the function, +so that it can only be seen within its recursive clique (if any), +or if the programmer specifically asks to see it via the `reveal_f()` lemma. + +We create a lemma to allow the user to selectively reveal the function's body +That is, given: + +``` + function {:opaque} foo(x:int, y:int) : int + requires 0 <= x < 5 + requires 0 <= y < 5 + ensures foo(x, y) < 10 + { x + y } +``` + +We produce: + +``` + lemma {:axiom} reveal_foo() + ensures forall x:int, y:int {:trigger foo(x,y)} :: + 0 <= x < 5 && 0 <= y < 5 ==> foo(x,y) == foo_FULL(x,y) +``` + +where `foo_FULL` is a copy of `foo` which does not have its body +hidden. In addition `foo_FULL` is given the +`{:opaque_full}` and `{:auto_generated}` attributes in addition +to the `{:opaque}` attribute (which it got because it is a copy of `foo`). + +### opaque full +The `{:opaque_full}` attribute is used to mark the _full_ version +of an opaque function. See Section [#sec-opaque]. 
+ +### prependAssertToken +This is used internally in Dafny as part of module refinement. +It is an attribute on an assert statement. +The Dafny code has the following comment: + +``` +// Clone the expression, but among the new assert's attributes, indicate +// that this assertion is supposed to be translated into a check. That is, +// it is not allowed to be just assumed in the translation, despite the fact +// that the condition is inherited. +``` + +TODO: Decide if we want to describe this in more detail, or whether +the functionality is already adequately described where +refinement is described. + +### tailrecursion +This attribute is used on a method declarations. It has a boolean argument. + +If specified with a false value it means the user specifically +requested no tail recursion, so none is done. + +If specified with a true value, or if not specified +then tail recursive optimization will be attempted subject to +the following conditions: + +* It is an error if the method is a ghost method and tail +recursion was explicitly requested. +* Only direct recursion is supported, not mutually recursive methods. +* If `{:tailrecursion true}` was specified but the code does not allow it +an error message is given. + +### timeLimitMultiplier +This attribute may be placed on a method or function declaration +and has an integer argument. If `{:timeLimitMultiplier X}` was +specified a `{:timelimit Y}` attributed is passed on to Boogie +where `Y` is `X` times either the default verification time limit +for a function or method, or times the value specified by the +Boogie `timelimit` command-line option. + +### trigger +Trigger attributes are used on quantifiers and comprehensions. +They are translated into Boogie triggers. + +### typeQuantifier +The `{:typeQuantifier}` must be used on a quantifier if it +quantifies over types. + + +## Boogie Attributes +Use the Boogie "/attrHelp" option to get the list of attributes +that Boogie recognizes and their meaning. 
Here is the output at +the time of this writing. Dafny passes attributes that have +been specified to the Boogie. + +``` +Boogie: The following attributes are supported by this implementation. + + ---- On top-level declarations --------------------------------------------- + + {:ignore} + Ignore the declaration (after checking for duplicate names). + + {:extern} + If two top-level declarations introduce the same name (for example, two + constants with the same name or two procedures with the same name), then + Boogie usually produces an error message. However, if at least one of + the declarations is declared with :extern, one of the declarations is + ignored. If both declarations are :extern, Boogie arbitrarily chooses + one of them to keep; otherwise, Boogie ignore the :extern declaration + and keeps the other. + + {:checksum } + Attach a checksum to be used for verification result caching. + + ---- On implementations and procedures ------------------------------------- + + {:inline N} + Inline given procedure (can be also used on implementation). + N should be a non-negative number and represents the inlining depth. + With /inline:assume call is replaced with "assume false" once inlining depth is reached. + With /inline:assert call is replaced with "assert false" once inlining depth is reached. + With /inline:spec call is left as is once inlining depth is reached. + With the above three options, methods with the attribute {:inline N} are not verified. + With /inline:none the entire attribute is ignored. + + {:verify false} + Skip verification of an implementation. + + {:vcs_max_cost N} + {:vcs_max_splits N} + {:vcs_max_keep_going_splits N} + Per-implementation versions of + /vcsMaxCost, /vcsMaxSplits and /vcsMaxKeepGoingSplits. + + {:selective_checking true} + Turn all asserts into assumes except for the ones reachable from + assumptions marked with the attribute {:start_checking_here}. 
+ Thus, "assume {:start_checking_here} something;" becomes an inverse + of "assume false;": the first one disables all verification before + it, and the second one disables all verification after. + + {:priority N} + Assign a positive priority 'N' to an implementation to control the order + in which implementations are verified (default: N = 1). + + {:id } + Assign a unique ID to an implementation to be used for verification + result caching (default: ":0"). + + {:timeLimit N} + Set the time limit for a given implementation. + + ---- On functions ---------------------------------------------------------- + + {:builtin "spec"} + {:bvbuiltin "spec"} + Rewrite the function to built-in prover function symbol 'fn'. + + {:inline} + {:inline true} + Expand function according to its definition before going to the prover. + + {:never_pattern true} + Terms starting with this function symbol will never be + automatically selected as patterns. It does not prevent them + from being used inside the triggers, and does not affect explicit + trigger annotations. Internally it works by adding {:nopats ...} + annotations to quantifiers. + + {:identity} + {:identity true} + If the function has 1 argument and the use of it has type X->X for + some X, then the abstract interpreter will treat the function as an + identity function. Note, the abstract interpreter trusts the + attribute--it does not try to verify that the function really is an + identity function. + + ---- On variables ---------------------------------------------------------- + + {:existential true} + Marks a global Boolean variable as existentially quantified. If + used in combination with option /contractInfer Boogie will check + whether there exists a Boolean assignment to the existentials + that makes all verification conditions valid. Without option + /contractInfer the attribute is ignored. 
+
+ ---- On assert statements --------------------------------------------------
+
+ {:subsumption n}
+ Overrides the /subsumption command-line setting for this assertion.
+
+ {:split_here}
+ Verifies code leading to this point and code leading from this point
+ to the next split_here as separate pieces. May help with timeouts.
+ May also occasionally double-report errors.
+
+ ---- The end ---------------------------------------------------------------
+
+```
+
+However a scan of Boogie's sources shows it checks for the
+following attributes.
+
+* `{:$}`
+* `{:$renamed$}`
+* `{:InlineAssume}`
+* `{:PossiblyUnreachable}`
+* `{:__dominator_enabled}`
+* `{:__enabled}`
+* `{:a##post##}`
+* `{:absdomain}`
+* `{:ah}`
+* `{:assumption}`
+* `{:assumption_variable_initialization}`
+* `{:atomic}`
+* `{:aux}`
+* `{:both}`
+* `{:bvbuiltin}`
+* `{:candidate}`
+* `{:captureState}`
+* `{:checksum}`
+* `{:constructor}`
+* `{:datatype}`
+* `{:do_not_predicate}`
+* `{:entrypoint}`
+* `{:existential}`
+* `{:exitAssert}`
+* `{:expand}`
+* `{:extern}`
+* `{:hidden}`
+* `{:ignore}`
+* `{:inline}`
+* `{:left}`
+* `{:linear}`
+* `{:linear_in}`
+* `{:linear_out}`
+* `{:msg}`
+* `{:name}`
+* `{:originated_from_invariant}`
+* `{:partition}`
+* `{:positive}`
+* `{:post}`
+* `{:pre}`
+* `{:precondition_previous_snapshot}`
+* `{:qid}`
+* `{:right}`
+* `{:selective_checking}`
+* `{:si_fcall}`
+* `{:si_unique_call}`
+* `{:sourcefile}`
+* `{:sourceline}`
+* `{:split_here}`
+* `{:stage_active}`
+* `{:stage_complete}`
+* `{:staged_houdini_tag}`
+* `{:start_checking_here}`
+* `{:subsumption}`
+* `{:template}`
+* `{:terminates}`
+* `{:upper}`
+* `{:verified_under}`
+* `{:weight}`
+* `{:yields}`
+
+# Dafny User's Guide
+## Installing Dafny From Binaries
+## Building Dafny from Source
+The current version of Dafny only works with Visual Studio 2012,
+so if you intend to run Dafny from within Visual Studio you must
+install Visual Studio 2012.
+
+Dafny performs its verification by translating the Dafny source into
+the Boogie intermediate verification language. Because Dafny references
+data structures defined in the Boogie project, the first step
+is to clone and build Boogie from sources. See
+<https://github.com/boogie-org/boogie>.
+
+Follow these steps.
+
+Let _work_ be a working directory.
+
+Clone Boogie using
+
+```
+cd work
+git clone https://github.com/boogie-org/boogie.git
+```
+
+Build Boogie using the directions from the Boogie web site,
+which for Windows currently are:
+
+1. Open Source\Boogie.sln in Visual Studio
+2. Right click the Boogie solution in the Solution Explorer and click Enable NuGet Package Restore. You will probably get a prompt asking to confirm this. Choose Yes.
+3. Click BUILD > Build Solution.
+
+Clone Dafny using Mercurial. The Dafny directory must be a sibling
+of the Boogie directory in order for it to find the Boogie files it needs.
+
+```
+cd work
+hg clone https://hg.codeplex.com/dafny
+```
+
+Download and install the Visual Studio 2012 SDK from
+
+* the Microsoft Download Center (search for "Visual Studio 2012 SDK").
+
+This is needed to build the Visual Studio Extension that
+runs Dafny from within Visual Studio 2012.
+
+Build the command-line Dafny executables.
+1. Open dafny\Source\Dafny.sln in Visual Studio
+2. Click BUILD > Build Solution.
+
+Build and install the Dafny Visual Studio extensions
+
+1. Open dafny/Source/DafnyExtension.sln in Visual Studio
+2. Click BUILD > Build Solution.
+3. This builds DafnyLanguageService.vsix and DafnyMenu.vsix
+in the dafny/Binaries directory.
+4. Install these by clicking on them from Windows Explorer. When
+prompted, only check installing into Visual Studio 2012.
+
+## Using Dafny From Visual Studio
+To test your installation, you can open Dafny test files
+from the dafny/Test subdirectory in Visual Studio 2012.
+You will want to use "VIEW/Error List" to ensure that
+you see any errors that Dafny detects, and
+"VIEW/Output" to see the result of any compilation.
+
+An example of a valid Dafny test is
+
+```
+dafny\Test\vstte2012\Tree.dfy
+```
+
+You can choose "Dafny/Compile" to compile the Dafny
+program to C#. Doing that for the above test
+produces `Tree.cs` and `Tree.dll` (since this test does
+not have a main program).
+
+The following file:
+
+```
+dafny\Test\dafny0\Array.dfy
+```
+
+is an example of a Dafny file with verification errors.
+The source will show red squiggles or dots where there
+are errors, and the Error List window will describe the
+errors.
+
+## Using Dafny From the Command Line
+### Dafny Command Line Options
+The command `Dafny.exe /?` gives the following description of
+options that can be passed to Dafny.
+
+```
+ ---- Dafny options ---------------------------------------------------------
+
+ Multiple .dfy files supplied on the command line are concatenated into one
+ Dafny program.
+
+ /dprelude:<file>
+ choose Dafny prelude file
+ /dprint:<file>
+ print Dafny program after parsing it
+ (use - as <file> to print to console)
+ /printMode:<Everything|NoIncludes|NoGhost>
+ NoIncludes disables printing of {:verify false} methods incorporated via the
+ include mechanism, as well as datatypes and fields included from other files.
+ NoGhost disables printing of functions, ghost methods, and proof statements
+ in implementation methods. It also disables anything NoIncludes disables.
+ /rprint: + print Dafny program after resolving it + (use - as to print to console) + /dafnyVerify: + 0 - stop after typechecking + 1 - continue on to translation, verification, and compilation + /compile: 0 - do not compile Dafny program + 1 (default) - upon successful verification of the Dafny + program, compile Dafny program to .NET assembly + Program.exe (if the program has a Main method) or + Program.dll (othewise), where Program.dfy is the name + of the last .dfy file on the command line + 2 - always attempt to compile Dafny program to C# program + out.cs, regardless of verification outcome + 3 - if there is a Main method and there are no verification + errors, compiles program in memory (i.e., does not write + an output file) and runs it + /spillTargetCode: + 0 (default) - don't write the compiled Dafny program (but + still compile it, if /compile indicates to do so) + 1 - write the compiled Dafny program as a .cs file + /dafnycc Disable features not supported by DafnyCC + /noCheating: + 0 (default) - allow assume statements and free invariants + 1 - treat all assumptions as asserts, and drop free. + /induction: + 0 - never do induction, not even when attributes request it + 1 - only apply induction when attributes request it + 2 - apply induction as requested (by attributes) and also + for heuristically chosen quantifiers + 3 (default) - apply induction as requested, and for + heuristically chosen quantifiers and ghost methods + /inductionHeuristic: + 0 - least discriminating induction heuristic (that is, lean + toward applying induction more often) + 1,2,3,4,5 - levels in between, ordered as follows as far as + how discriminating they are: 0 < 1 < 2 < (3,4) < 5 < 6 + 6 (default) - most discriminating + /noIncludes Ignore include directives + /noNLarith Reduce Z3's knowledge of non-linear arithmetic (*,/,%). + Results in more manual work, but also produces more predictable behavior. 
+ /autoReqPrint: + Print out requirements that were automatically generated by autoReq. + /noAutoReq Ignore autoReq attributes + /allowGlobals Allow the implicit class '_default' to contain fields, instance functions, + and instance methods. These class members are declared at the module scope, + outside of explicit classes. This command-line option is provided to simply + a transition from the behavior in the language prior to version 1.9.3, from + which point onward all functions and methods declared at the module scope are + implicitly static and fields declarations are not allowed at the module scope. + The reference manual is written assuming this option is not given. + + + /nologo suppress printing of version number, copyright message + /env: print command line arguments + 0 - never, 1 (default) - during BPL print and prover log, + 2 - like 1 and also to standard output + /wait await Enter from keyboard before terminating program + /xml: also produce output in XML format to + + ---- Boogie options -------------------------------------------------------- + + Multiple .bpl files supplied on the command line are concatenated into one + Boogie program. + + /proc:

: limits which procedures to check + /noResolve : parse only + /noTypecheck : parse and resolve only + + /print: : print Boogie program after parsing it + (use - as to print to console) + /pretty: + 0 - print each Boogie statement on one line (faster). + 1 (default) - pretty-print with some line breaks. + /printWithUniqueIds : print augmented information that uniquely + identifies variables + /printUnstructured : with /print option, desugars all structured statements + /printDesugared : with /print option, desugars calls + + /overlookTypeErrors : skip any implementation with resolution or type + checking errors + + /loopUnroll: + unroll loops, following up to n back edges (and then some) + /soundLoopUnrolling + sound loop unrolling + /printModel: + 0 (default) - do not print Z3's error model + 1 - print Z3's error model + 2 - print Z3's error model plus reverse mappings + 4 - print Z3's error model in a more human readable way + /printModelToFile: + print model to instead of console + /mv: Specify file where to save the model in BVD format + /enhancedErrorMessages: + 0 (default) - no enhanced error messages + 1 - Z3 error model enhanced error messages + + /printCFG: : print control flow graph of each implementation in + Graphviz format to files named: + ..dot + + /useBaseNameForFileName : When parsing use basename of file for tokens instead + of the path supplied on the command line + + ---- Inference options ----------------------------------------------------- + + /infer: + use abstract interpretation to infer invariants + The default is /infer:i + are as follows (missing means all) + i = intervals + c = constant propagation + d = dynamic type + n = nullness + p = polyhedra for linear inequalities + t = trivial bottom/top lattice (cannot be combined with + other domains) + j = stronger intervals (cannot be combined with other + domains) + or the following (which denote options, not domains): + s = debug statistics + 0..9 = number of iterations before applying a 
widen (default=0) + /noinfer turn off the default inference, and overrides the /infer + switch on its left + /checkInfer instrument inferred invariants as asserts to be checked by + theorem prover + /interprocInfer + perform interprocedural inference (deprecated, not supported) + /contractInfer + perform procedure contract inference + /instrumentInfer + h - instrument inferred invariants only at beginning of + loop headers (default) + e - instrument inferred invariants at beginning and end + of every block (this mode is intended for use in + debugging of abstract domains) + /printInstrumented + print Boogie program after it has been instrumented with + invariants + + ---- Debugging and general tracing options --------------------------------- + + /trace blurt out various debug trace information + /traceTimes output timing information at certain points in the pipeline + /tracePOs output information about the number of proof obligations + (also included in the /trace output) + /log[:method] Print debug output during translation + + /break launch and break into debugger + + ---- Verification-condition generation options ----------------------------- + + /liveVariableAnalysis: + 0 = do not perform live variable analysis + 1 = perform live variable analysis (default) + 2 = perform interprocedural live variable analysis + /noVerify skip VC generation and invocation of the theorem prover + /verifySnapshots: + verify several program snapshots (named .v0.bpl + to .vN.bpl) using verification result caching: + 0 - do not use any verification result caching (default) + 1 - use the basic verification result caching + 2 - use the more advanced verification result caching + /verifySeparately + verify each input program separately + /removeEmptyBlocks: + 0 - do not remove empty blocks during VC generation + 1 - remove empty blocks (default) + /coalesceBlocks: + 0 = do not coalesce blocks + 1 = coalesce blocks (default) + /vc: n = nested block (default for /prover:Simplify), + m = 
nested block reach, + b = flat block, r = flat block reach, + s = structured, l = local, + d = dag (default, except with /prover:Simplify) + doomed = doomed + /traceverify print debug output during verification condition generation + /subsumption: + apply subsumption to asserted conditions: + 0 - never, 1 - not for quantifiers, 2 (default) - always + /alwaysAssumeFreeLoopInvariants + usually, a free loop invariant (or assume + statement in that position) is ignored in checking contexts + (like other free things); this option includes these free + loop invariants as assumes in both contexts + /inline: use inlining strategy for procedures with the :inline + attribute, see /attrHelp for details: + none + assume (default) + assert + spec + /printInlined + print the implementation after inlining calls to + procedures with the :inline attribute (works with /inline) + /lazyInline:1 + Use the lazy inlining algorithm + /stratifiedInline:1 + Use the stratified inlining algorithm + /fixedPointEngine: + Use the specified fixed point engine for inference + /recursionBound: + Set the recursion bound for stratified inlining to + be n (default 500) + /inferLeastForUnsat: + Infer the least number of constants (whose names + are prefixed by ) that need to be set to + true for the program to be correct. This turns + on stratified inlining. + /smoke Soundness Smoke Test: try to stick assert false; in some + places in the BPL and see if we can still prove it + /smokeTimeout: + Timeout, in seconds, for a single theorem prover + invocation during smoke test, defaults to 10. + /causalImplies + Translate Boogie's A ==> B into prover's A ==> A && B. 
+ /typeEncoding: + how to encode types when sending VC to theorem prover + n = none (unsound) + p = predicates (default) + a = arguments + m = monomorphic + /monomorphize + Do not abstract map types in the encoding (this is an + experimental feature that will not do the right thing if + the program uses polymorphism) + /reflectAdd In the VC, generate an auxiliary symbol, elsewhere defined + to be +, instead of +. + + ---- Verification-condition splitting -------------------------------------- + + /vcsMaxCost: + VC will not be split unless the cost of a VC exceeds this + number, defaults to 2000.0. This does NOT apply in the + keep-going mode after first round of splitting. + /vcsMaxSplits: + Maximal number of VC generated per method. In keep + going mode only applies to the first round. + Defaults to 1. + /vcsMaxKeepGoingSplits: + If set to more than 1, activates the keep + going mode, where after the first round of splitting, + VCs that timed out are split into pieces and retried + until we succeed proving them, or there is only one + assertion on a single path and it timeouts (in which + case error is reported for that assertion). + Defaults to 1. + /vcsKeepGoingTimeout: + Timeout in seconds for a single theorem prover + invocation in keep going mode, except for the final + single-assertion case. Defaults to 1s. + /vcsFinalAssertTimeout: + Timeout in seconds for the single last + assertion in the keep going mode. Defaults to 30s. + /vcsPathJoinMult: + If more than one path join at a block, by how much + multiply the number of paths in that block, to accomodate + for the fact that the prover will learn something on one + paths, before proceeding to another. Defaults to 0.8. + /vcsPathCostMult: + /vcsAssumeMult: + The cost of a block is + ( + *) * + (1.0 + *) + defaults to 1.0, defaults to 0.01. + The cost of a single assertion or assumption is + currently always 1.0. 
+ /vcsPathSplitMult: + If the best path split of a VC of cost A is into + VCs of cost B and C, then the split is applied if + A >= *(B+C), otherwise assertion splitting will be + applied. Defaults to 0.5 (always do path splitting if + possible), set to more to do less path splitting + and more assertion splitting. + /vcsDumpSplits + For split #n dump split.n.dot and split.n.bpl. + Warning: Affects error reporting. + /vcsCores: + Try to verify VCs at once. Defaults to 1. + /vcsLoad: Sets vcsCores to the machine's ProcessorCount * f, + rounded to the nearest integer (where 0.0 <= f <= 3.0), + but never to less than 1. + + ---- Prover options -------------------------------------------------------- + + /errorLimit: + Limit the number of errors produced for each procedure + (default is 5, some provers may support only 1) + /timeLimit: + Limit the number of seconds spent trying to verify + each procedure + /errorTrace: + 0 - no Trace labels in the error output, + 1 (default) - include useful Trace labels in error output, + 2 - include all Trace labels in the error output + /vcBrackets: + bracket odd-charactered identifier names with |'s. is: + 0 - no (default with non-/prover:Simplify), + 1 - yes (default with /prover:Simplify) + /prover: use theorem prover , where is either the name of + a DLL containing the prover interface located in the + Boogie directory, or a full path to a DLL containing such + an interface. The standard interfaces shipped include: + SMTLib (default, uses the SMTLib2 format and calls Z3) + Z3 (uses Z3 with the Simplify format) + Simplify + ContractInference (uses Z3) + Z3api (Z3 using Managed .NET API) + /proverOpt:KEY[=VALUE] + Provide a prover-specific option (short form /p). + /proverLog: + Log input for the theorem prover. 
Like filenames + supplied as arguments to other options, can use the + following macros: + @TIME@ expands to the current time + @PREFIX@ expands to the concatenation of strings given + by /logPrefix options + @FILE@ expands to the last filename specified on the + command line + In addition, /proverLog can also use the macro '@PROC@', + which causes there to be one prover log file per + verification condition, and the macro then expands to the + name of the procedure that the verification condition is for. + /logPrefix: + Defines the expansion of the macro '@PREFIX@', which can + be used in various filenames specified by other options. + /proverLogAppend + Append (not overwrite) the specified prover log file + /proverWarnings + 0 (default) - don't print, 1 - print to stdout, + 2 - print to stderr + /proverMemoryLimit: + Limit on the virtual memory for prover before + restart in MB (default:100MB) + /restartProver + Restart the prover after each query + /proverShutdownLimit + Time between closing the stream to the prover and + killing the prover process (default: 0s) + /platform:, + ptype = v11,v2,cli1 + location = platform libraries directory + + Simplify specific options: + /simplifyMatchDepth: + Set Simplify prover's matching depth limit + + Z3 specific options: + /z3opt: specify additional Z3 options + /z3multipleErrors + report multiple counterexamples for each error + /useArrayTheory + use Z3's native theory (as opposed to axioms). Currently + implies /monomorphize. + /useSmtOutputFormat + Z3 outputs a model in the SMTLIB2 format. 
+ /z3types generate multi-sorted VC that make use of Z3 types + /z3lets: 0 - no LETs, 1 - only LET TERM, 2 - only LET FORMULA, + 3 - (default) any + /z3exe: + path to Z3 executable + + CVC4 specific options: + /cvc4exe: + path to CVC4 executable + +``` + + +# References +[BIB] + diff -Nru dafny-1.9.5/Docs/DafnyRef/dafnyx.json dafny-1.9.7/Docs/DafnyRef/dafnyx.json --- dafny-1.9.5/Docs/DafnyRef/dafnyx.json 1970-01-01 00:00:00.000000000 +0000 +++ dafny-1.9.7/Docs/DafnyRef/dafnyx.json 2016-06-05 21:11:14.000000000 +0000 @@ -0,0 +1,4 @@ +{ "name": "dafnyx", + "extend": "dafny", + "extraKeywords": ["inductive"] +} \ No newline at end of file diff -Nru dafny-1.9.5/Docs/DafnyRef/ignores.dic dafny-1.9.7/Docs/DafnyRef/ignores.dic --- dafny-1.9.5/Docs/DafnyRef/ignores.dic 1970-01-01 00:00:00.000000000 +0000 +++ dafny-1.9.7/Docs/DafnyRef/ignores.dic 2016-06-05 21:11:14.000000000 +0000 @@ -0,0 +1,83 @@ +Dafny +lexer +Datatypes +initializable +Multisets +multiset +multisets +disequality +maplets +maplet +datatypes +datatype +datatype's +Dafny's +destructors +nullable +subarray +indices +mixin +supertype +CLU +Async +async +Newtypes +newtype +newtype's +pre +BNF +Polikarpova +Paqui +backticks +colorizer +Daan's +Btw +Codeplex +formedness +forall +newtypes +TODO +updatable +toplevel +bodyless +bool +calc +codatatype +colemma +comethod +copredicate +nat +wildcard +Builtin +builtin +inline +NoUSIdent +iff +timeLimitMultiplier +prependAssertToken +ModuleDefinition +AssignmentRhs +LocalVariable +LetExpr +MaybeFreeExpression +attrHelp +EXE +IDE +SkippingLemma +deconstructing +Leino +Moskal +Agda +Coq +pointwise +SMT +BelowSquare +CoFixpoint +Copredicates +prepending +unrollings +Colemmas +Explies +imaps +NamedExpr +strengthed \ No newline at end of file diff -Nru dafny-1.9.5/Docs/DafnyRef/krml250.bib dafny-1.9.7/Docs/DafnyRef/krml250.bib --- dafny-1.9.5/Docs/DafnyRef/krml250.bib 1970-01-01 00:00:00.000000000 +0000 +++ dafny-1.9.7/Docs/DafnyRef/krml250.bib 2016-06-05 21:11:14.000000000 
+0000 @@ -0,0 +1,2026 @@ +@string{lncs = "LNCS"} + +@InCollection{Leino:Dafny:MOD2008, + author = {K. Rustan M. Leino}, + title = {Specification and verification of object-oriented software}, + booktitle = {Engineering Methods and Tools for Software Safety and Security}, + pages = {231-266}, + publisher = {IOS Press}, + year = {2009}, + editor = {Manfred Broy and Wassiou Sitou and Tony Hoare}, + volume = {22}, + series = {NATO Science for Peace and Security Series D: Information and Communication Security}, + note = {Summer School Marktoberdorf 2008 lecture notes}, +} + +@inproceedings{Why:Platform, + author = {Jean-Christophe Filli{\^a}tre and Claude March{\'e}}, + title = {The {Why}/{Krakatoa}/{Caduceus} Platform for Deductive Program Verification}, + booktitle = {Computer Aided Verification, 19th International Conference, CAV 2007}, + editor = {Werner Damm and Holger Hermanns}, + volume = {4590}, + series = lncs, + publisher = {Springer}, + month = jul, + year = {2007}, + pages = {173--177} +} + +@InProceedings{BarrettTinelli:CVC3, + author = {Clark Barrett and Cesare Tinelli}, + title = {{CVC3}}, + booktitle = {Computer Aided Verification, 19th International Conference, CAV 2007}, + editor = {Werner Damm and Holger Hermanns}, + volume = {4590}, + series = lncs, + publisher = {Springer}, + month = jul, + year = {2007}, + pages = {298-302}, +} + +@InProceedings{HubertMarche:SchorrWaite, + author = {Thierry Hubert and Claude March{\'e}}, + title = {A case study of {C} source code verification: the + {S}chorr-{W}aite algorithm}, + booktitle = {Third IEEE International Conference on Software + Engineering and Formal Methods (SEFM 2005)}, + editor = {Bernhard K. 
Aichernig and Bernhard Beckert}, + publisher = {IEEE Computer Society }, + month = sep, + year = {2005}, + pages = {190-199}, +} + +@Article{BroyPepper:SchorrWaite, + author = {Manfred Broy and Peter Pepper}, + title = {Combining Algebraic and Algorithmic Reasoning: An + Approach to the {S}chorr-{W}aite Algorithm}, + journal = toplas, + volume = {4}, + number = {3}, + month = jul, + year = {1982}, + pages = {362-381}, +} + +@Article{MehtaNipkow:SchorrWaite, + author = {Farhad Mehta and Tobias Nipkow}, + title = {Proving pointer programs in higher-order logic}, + journal = {Information and Computation}, + year = {2005}, + volume = {199}, + number = {1--2}, + pages = {200-227}, + month = may # "--" # jun, +} + +@InProceedings{BallEtAll:ScalableChecking, + author = {Thomas Ball and Brian Hackett and Shuvendu K. Lahiri + and Shaz Qadeer and Julien Vanegue}, + title = {Towards Scalable Modular Checking of User-Defined Properties}, + booktitle = {Verified Software: Theories, Tools, Experiments, + (VSTTE 2010)}, + editor = {Gary T. Leavens and Peter O'Hearn and Sriram K. 
Rajamani}, + volume = {6217}, + series = lncs, + publisher = {Springer}, + month = aug, + year = {2010}, + pages = {1-24}, +} + +@InProceedings{RegisGianasPottier:FunctionalHoare, + author = {Yann R{\'e}gis-Gianas and Fran{\,c}ois Pottier}, + title = {A {H}oare Logic for Call-by-Value Functional Programs}, + booktitle = {Mathematics of Program Construction, 9th International Conference, MPC 2008}, + pages = {305-335}, + year = {2008}, + editor = {Philippe Audebaud and Christine Paulin-Mohring}, + volume = {5133}, + series = lncs, + month = jul, + publisher = {Springer}, +} + +@InProceedings{VeanesEtAl:SpecExplorer, + author = {Margus Veanes and Colin Campbell and Wolfgang + Grieskamp and Wolfram Schulte and Nikolai Tillmann + and Lev Nachmanson}, + title = {Model-Based Testing of Object-Oriented Reactive + Systems with {Spec} {Explorer}}, + booktitle = {Formal Methods and Testing}, + pages = {39-76}, + year = {2008}, + editor = {Robert M. Hierons and Jonathan P. Bowen and Mark Harman}, + volume = {4949}, + series = lncs, + publisher = {Springer}, +} + +@book{Dijkstra:Discipline, + author = "Edsger W. Dijkstra", + title = "A Discipline of Programming", + publisher = "Prentice Hall", + address = "Englewood Cliffs, NJ", + year = 1976 +} + +@InProceedings{LeinoMueller:ESOP2009, + author = {K. Rustan M. Leino and Peter M{\"u}ller}, + title = {A Basis for Verifying Multi-threaded Programs}, + booktitle = {Programming Languages and Systems, 18th European + Symposium on Programming, ESOP 2009}, + editor = {Giuseppe Castagna}, + volume = {5502}, + series = lncs, + publisher = {Springer}, + month = mar, + year = 2009, + pages = {378-393}, +} + +@InProceedings{LeinoRuemmer:Boogie2, + author = {K. Rustan M. 
Leino and Philipp R{\"u}mmer}, + title = {A Polymorphic Intermediate Verification Language: + Design and Logical Encoding}, + booktitle = {Tools and Algorithms for the Construction and + Analysis of Systems, 16th International Conference, + TACAS 2010}, + editor = {Javier Esparza and Rupak Majumdar}, + series = lncs, + volume = 6015, + publisher = {Springer}, + month = mar, + year = 2010, + pages = {312-327}, +} + +@book{LiskovGuttag:book, + author = "Barbara Liskov and John Guttag", + title = "Abstraction and Specification in Program Development", + publisher = "MIT Press", + series = "MIT Electrical Engineering and Computer Science Series", + year = 1986 +} + +@TechReport{DahlEtAl:Simula67, + author = {Ole-Johan Dahl and Bj{\o}rn Myhrhaug and Kristen Nygaard}, + title = {Common Base Language}, + institution = {Norwegian Computing Center}, + type = {Publication}, + number = {S-22}, + month = oct, + year = 1970, +} + +@inproceedings{LeinoMueller:ModelFields, + author = {K. Rustan M. Leino and + Peter M{\"u}ller}, + title = {A Verification Methodology for Model Fields}, + booktitle = "Programming Languages and Systems, 15th European Symposium on Programming, ESOP 2006", + editor = "Peter Sestoft", + series = lncs, + volume = 3924, + publisher = "Springer", + month = mar, + year = 2006, + pages = {115-130}, +} + +@InProceedings{CarterEtAl:UsingPerfectDeveloper, + author = {Gareth Carter and Rosemary Monahan and Joseph M. Morris}, + title = {Software Refinement with {P}erfect {D}eveloper}, + booktitle = {Third IEEE International Conference on Software + Engineering and Formal Methods (SEFM 2005)}, + pages = {363-373}, + editor = {Bernhard K. 
Aichernig and Bernhard Beckert}, + month = sep, + year = {2005}, + publisher = {IEEE Computer Society}, +} + +@InProceedings{Abrial:SchorrWaite, + author = {Jean-Raymond Abrial}, + title = {Event Based Sequential Program Development: + Application to Constructing a Pointer Program}, + booktitle = {FME 2003: Formal Methods, International Symposium of + Formal Methods Europe}, + editor = {Keijiro Araki and Stefania Gnesi and Dino Mandrioli}, + volume = {2805}, + series = lncs, + publisher = {Springer}, + month = sep, + year = {2003}, + pages = {51-74}, +} + +@article{Barnett-etal04, + author = {Mike Barnett and Robert DeLine and Manuel F{\"a}hndrich and + K. Rustan M. Leino and Wolfram Schulte}, + title = {Verification of Object-Oriented Programs with Invariants}, + journal = {Journal of Object Technology}, + volume = 3, + number = 6, + year = 2004, + pages = {27-56}, +} + +@InProceedings{SmansEtAl:ImplicitDynamicFrames, + author = {Jan Smans and Bart Jacobs and Frank Piessens}, + title = {Implicit Dynamic Frames: Combining Dynamic Frames + and Separation Logic}, + booktitle = {ECOOP 2009 --- Object-Oriented Programming, 23rd + European Conference}, + editor = {Sophia Drossopoulou}, + volume = {5653}, + series = lncs, + publisher = {Springer}, + month = jul, + year = {2009}, + pages = {148-172}, +} + +@inproceedings{GriesPrins:Encapsulation, + author = "David Gries and Jan Prins", + title = "A New Notion of Encapsulation", + booktitle = "Proceedings of the {ACM} {SIGPLAN} 85 + Symposium on Language Issues in Programming Environments", + publisher = "ACM", + series = "SIGPLAN Notices 20", + number = 7, + month = jul, + year = 1985, + pages = "131-139" +} + +@InProceedings{YangHawblitzel:Verve, + author = {Jean Yang and Chris Hawblitzel}, + title = {Safe to the last instruction: automated verification of a type-safe operating system}, + booktitle = {Proceedings of the 2010 ACM SIGPLAN Conference on + Programming Language Design and Implementation, PLDI + 2010}, + 
editor = {Benjamin G. Zorn and Alexander Aiken}, + month = jun, + year = {2010}, + publisher = {ACM}, + pages = {99-110}, +} + +@Book{BoyerMoore:book, + author = {Robert S. Boyer and J Strother Moore}, + title = {A Computational Logic}, + publisher = {Academic Press}, + series = {ACM Monograph Series}, + year = {1979}, +} + +@article{HoareWirth:Pascal, + author = "C. A. R. Hoare and N. Wirth", + title = "An axiomatic definition of the programming language {PASCAL}", + journal = acta, + volume = 2, + number = 4, + year = 1973, + pages = "335-355" +} + +@article{Hoare:AxiomaticBasis, + author = "C. A. R. Hoare", + title = "An axiomatic basis for computer programming", + journal = cacm, + volume = 12, + number = 10, + year = 1969, + month = oct, + pages = "576--580,583" +} + +@InProceedings{LeinoMoskal:vacid0-notYetConfirmed, + author = {K. Rustan M. Leino and Micha{\l} Moskal}, + title = {{VACID-0}: {V}erification of {A}mple {C}orrectness + of {I}nvariants of {D}ata-structures, Edition 0}, + booktitle = {VS-Tools & Experiments}, + year = 2010, + editor = {Rajeev Joshi and Tiziana Margaria and Peter + M{\"u}ller and David Naumann and Hongseok Yang}, + series = {VSTTE 2010 Workshop Proceedings}, + publisher = {ETH Zurich Technical Report 676}, + month = aug, +} + +@InCollection{Chalice:tutorial, + author = {K. Rustan M. Leino and Peter M{\"u}ller and Jan Smans}, + title = {Verification of Concurrent Programs with {C}halice}, + booktitle = {Foundations of Security Analysis and Design {V}: {FOSAD} 2007/2008/2009 Tutorial Lectures}, + editor = {Alessandro Aldini and Gilles Barthe and Roberto Gorrieri}, + volume = {5705}, + series = lncs, + publisher = {Springer}, + year = {2009}, + pages = {195-222} +} + +@inproceedings{LeinoMuellerSmans10, + author = {K. Rustan M. 
Leino and Peter M{\"u}ller and Jan Smans}, + title = {Deadlock-Free Channels and Locks}, + booktitle = {Programming Languages and Systems, 19th European Symposium on Programming, ESOP 2010}, + editor = {Andrew D. Gordon}, + volume = {6012}, + series = lncs, + publisher = {Springer}, + month = mar, + year = {2010}, + pages = {407-426} +} + +@Book{BundyEtAl:Rippling, + author = {Alan Bundy and David Basin and Dieter Hutter and Andrew Ireland}, + title = {Rippling: Meta-level Guidance for Mathematical Reasoning}, + publisher = {Cambridge University Press}, + volume = {56}, + series = {Cambridge Tracts in Theoretical Computer Science}, + year = {2005}, +} + +@book{Gries:Science, + author = "David Gries", + title = "The Science of Programming", + publisher = "Springer-Verlag", + series = "Texts and Monographs in Computer Science", + year = 1981 +} + +@Book{DijkstraFeijen:Book, + author = "Edsger W. Dijkstra and W. H. J. Feijen", + title = "A Method of Programming", + publisher = "Addison-Wesley", + month = jul, + year = 1988, +} + +@book{Kaldewaij:Programming, + author = "Anne Kaldewaij", + title = "Programming: The Derivation of Algorithms", + publisher = "Prentice-Hall International", + year = 1990, + series = "Series in Computer Science", +} + +@InProceedings{LeinoMonahan:VSTTE2010, + author = {K. Rustan M. Leino and Rosemary Monahan}, + title = {Dafny Meets the Verification Benchmarks Challenge}, + booktitle = {Verified Software: Theories, Tools, Experiments, + Third International Conference, VSTTE 2010}, + pages = {112-126}, + year = {2010}, + editor = {Gary T. Leavens and Peter W. O'Hearn and Sriram K. Rajamani}, + volume = {6217}, + series = lncs, + month = aug, + publisher = {Springer}, +} + +@InProceedings{VSComp2010:report, + author = {Vladimir Klebanov and Peter M{\"u}ller and Natarajan Shankar and + Gary T. 
Leavens and Valentin W{\"u}stholz and Eyad Alkassar and + Rob Arthan and Derek Bronish and Rod Chapman and Ernie Cohen and + Mark Hillebrand and Bart Jacobs and K. Rustan M. Leino and + Rosemary Monahan and Frank Piessens and Nadia Polikarpova and + Tom Ridge and Jan Smans and Stephan Tobies and Thomas Tuerk and + Mattias Ulbrich and Benjamin Wei{\ss}}, + title = {The 1st Verified Software Competition: Experience Report}, + booktitle = {FM 2011: Formal Methods --- 17th International + Symposium on Formal Methods}, + pages = {154-168}, + year = {2011}, + editor = {Michael Butler and Wolfram Schulte}, + volume = {6664}, + series = lncs, + month = jun, + publisher = {Springer}, +} + +@InProceedings{Leino:Dafny:LPAR16, + author = {K. Rustan M. Leino}, + title = {Dafny: An Automatic Program Verifier for Functional Correctness}, + booktitle = {LPAR-16}, + year = {2010}, + volume = {6355}, + series = lncs, + publisher = {Springer}, + pages = {348-370}, +} + +@book{BackVonWright:Book, + author = "Ralph-Johan Back and von Wright, Joakim", + title = "Refinement Calculus: A Systematic Introduction", + series = "Graduate Texts in Computer Science", + publisher = "Springer-Verlag", + year = 1998 +} + +@Article{BalzerCheathamGreen:1990s, + author = {Robert Balzer and {Cheatham, Jr.}, Thomas E. and Cordell Green}, + title = {Software Technology in the 1990's: Using a New Paradigm}, + journal = {IEEE Computer}, + year = {1983}, + volume = {16}, + number = {11}, + pages = {39-45 }, + month = nov, +} + +@InProceedings{Zloof:QBE, + author = {Mosh{\'e} M. Zloof}, + title = {Query by Example}, + booktitle = {American Federation of Information Processing + Societies: 1975 National Computer Conference}, + pages = {431-438}, + year = {1975}, + month = may, + publisher = {AFIPS Press }, +} + +@InProceedings{HarrisGulwani:PLDI2011, + author = {William R. 
Harris and Sumit Gulwani}, + title = {Spreadsheet table transformations from examples}, + booktitle = {Proceedings of the 32nd ACM SIGPLAN Conference on + Programming Language Design and Implementation, PLDI + 2011}, + pages = {317-328}, + year = {2011}, + editor = {Mary W. Hall and David A. Padua}, + month = jun, + publisher = {ACM}, +} + +@Article{Smith:KIDS-overview, + author = "Douglas R. Smith", + title = "{KIDS}: A Semi-Automatic Program Development System", + journal = {IEEE Transactions on Software Engineering }, + volume = 16, + number = 9, + month = sep, + year = 1990, + pages = "1024-1043", +} + +@Article{RodinToolset, + author = {Jean-Raymond Abrial and Michael Butler and Stefan + Hallerstede and Thai Son Hoang and Farhad Mehta and + Laurent Voisin}, + title = {Rodin: An Open Toolset for Modelling and Reasoning in {Event-B}}, + journal = {International Journal on Software Tools for Technology Transfer}, + year = {2010}, + month = apr, +} + +@Article{Summers:LISP-from-examples, + author = {Phillip D. Summers}, + title = {A Methodology for {LISP} Program Construction from Examples}, + journal = jacm, + year = {1977}, + volume = {24}, + number = {1}, + pages = {161-175}, + month = jan, +} + +@InProceedings{Pex:overview, + author = {Nikolai Tillmann and de Halleux, Jonathan}, + title = {Pex---White Box Test Generation for {.NET}}, + booktitle = {Tests and Proofs, Second International Conference, TAP 2008}, + pages = {134-153}, + year = {2008}, + editor = {Bernhard Beckert and Reiner H{\"a}hnle}, + series = lncs, + volume = {4966}, + month = apr, + publisher = {Springer}, +} + +@InProceedings{GodefroidKlarlundSen:DART, + author = {Patrice Godefroid and Nils Klarlund and Koushik Sen}, + title = {{DART}: directed automated random testing}, + booktitle = {Proceedings of the ACM SIGPLAN 2005 Conference on + Programming Language Design and Implementation}, + pages = {213-223}, + year = {2005}, + editor = {Vivek Sarkar and Mary W. 
Hall}, + month = jun, + publisher = {ACM}, +} + +@PhdThesis{Monahan:thesis, + author = {Rosemary Monahan}, + title = {Data Refinement in Object-Oriented Verification}, + school = {Dublin City University}, + year = {2010}, +} + +@InProceedings{Denali:pldi2002, + author = {Rajeev Joshi and Greg Nelson and Keith H. Randall}, + title = {Denali: A Goal-directed Superoptimizer}, + booktitle = {Proceedings of the 2002 ACM SIGPLAN Conference on + Programming Language Design and Implementation + (PLDI)}, + pages = {304-314}, + year = {2002}, + month = jun, + publisher = {ACM}, +} +@Book{SETL, + author = {J. T. Schwartz and R. B. K. Dewar and E. Dubinsky and E. Schonberg}, + title = {Programming with Sets: An Introduction to {SETL}}, + series = {Texts and Monographs in Computer Science}, + publisher = {Springer}, + year = {1986}, +} + +@InProceedings{KuncakEtAl:PLDI2010, + author = {Viktor Kuncak and Mika{\"e}l Mayer and Ruzica Piskac + and Philippe Suter}, + title = {Complete functional synthesis}, + booktitle = {Proceedings of the 2010 ACM SIGPLAN Conference on + Programming Language Design and Implementation, PLDI + 2010}, + pages = {316-329}, + year = {2010}, + editor = {Benjamin G. Zorn and Alexander Aiken}, + month = jun, + publisher = {ACM}, +} + +@Article{JML:ToolSuite:STTT, + author = {Lilian Burdy and Yoonsik Cheon and David R. Cok and + Michael D. Ernst and Joseph R. Kiniry and Gary T. Leavens and + K. Rustan M. Leino and Erik Poll}, + title = {An overview of {JML} tools and applications}, + journal = {International Journal on Software Tools + for Technology Transfer}, + volume = 7, + number = 3, + publisher = {Springer}, + month = jun, + year = 2005, + pages = {212-232}, +} + +@InProceedings{Green:ProblemSolving, + author = {Cordell Green}, + title = {Application of Theorem Proving to Problem Solving}, + booktitle = {Proceedings of the 1st International Joint Conference on Artificial Intelligence}, + editor = {Donald E. Walker and Lewis M. 
Norton}, + pages = {219-240}, + year = {1969}, + month = may, + publisher = {William Kaufmann}, +} + +@Article{MannaWaldinger:CACM1971, + author = {Zohar Manna and Richard J. Waldinger}, + title = {Towards automatic program synthesis}, + journal = cacm, + year = {1971}, + volume = {14}, + number = {3}, + pages = {151-165}, + month = mar, +} + +@Article{RichWaters:ProgAppren, + author = {Charles Rich and Richard C. Waters}, + title = {The {P}rogrammer's {A}pprentice: A Research Overview}, + journal = {IEEE Computer}, + year = {1988}, + volume = {21}, + number = {11}, + pages = {10-25}, + month = nov, +} + +@InProceedings{Green:PSI, + author = {Cordell Green}, + title = {The Design of the {PSI} Program Synthesis System}, + booktitle = {Proceedings of the 2nd International Conference on Software Engineering}, + pages = {4-18}, + year = {1976}, + month = oct, + publisher = {IEEE Computer Society}, +} + +@Article{SpecSharp:Retrospective:CACM, + author = {Mike Barnett and Manuel F{\"a}hndrich and + K. Rustan M. Leino and Peter M{\"u}ller and + Wolfram Schulte and Herman Venter}, + title = {Specification and Verification: The {Spec\#} Experience}, + journal = cacm, + volume = {54}, + number = {6}, + pages = {81-91}, + month = jun, + year = 2011, +} + +@article{Filipovic:SepLogicRefinement, + author = {Ivana Filipovi{\'c} and Peter O'Hearn and + Noah Torp-Smith and Hongseok Yang}, + title = {Blaming the client: on data refinement in the presence of pointers}, + journal = {Formal Aspects of Computing}, + volume = {22}, + number = {5}, + month = sep, + year = {2010}, + pages = {547-583}, +} + +@inproceedings{Grandy:JavaRefinement, + author = {Grandy, Holger and Stenzel, Kurt and Reif, Wolfgang}, + title = {A refinement method for {J}ava programs}, + booktitle = {Formal Methods for Open Object-Based Distributed Systems, 9th IFIP WG 6.1 International Conference, FMOODS 2007}, + editor = {Marcello M. 
Bonsangue and Einar Broch Johnsen}, + series = lncs, + number = {4468}, + month = jun, + year = {2007}, + publisher = {Springer}, + pages = {221--235}, +} + +@InCollection{KoenigLeino:MOD2011, + author = {Jason Koenig and K. Rustan M. Leino}, + title = {Getting Started with {D}afny: A Guide}, + booktitle = {Software Safety and Security: Tools for Analysis and Verification}, + pages = {152-181}, + publisher = {IOS Press}, + year = {2012}, + editor = {Tobias Nipkow and Orna Grumberg and Benedikt Hauptmann}, + volume = {33}, + series = {NATO Science for Peace and Security Series D: Information and Communication Security}, + note = {Summer School Marktoberdorf 2011 lecture notes}, +} + +@InProceedings{VonWright:ExtendingWindowInference, + author = {von Wright, Joakim}, + title = {Extending Window Inference}, + booktitle = {Theorem Proving in Higher Order Logics, 11th International Conference, TPHOLs'98}, + pages = {17-32}, + year = {1998}, + editor = {Jim Grundy and Malcolm C. Newey}, + volume = {1479}, + series = lncs, + publisher = {Springer}, +} + +@InProceedings{BauerWenzel:IsarExperience, + author = {Gertrud Bauer and Markus Wenzel}, + title = {Calculational reasoning revisited: an {I}sabelle/{I}sar experience}, + booktitle = {Theorem Proving in Higher Order Logics, 14th International Conference, TPHOLs 2001}, + pages = {75-90}, + year = {2001}, + editor = {Richard J. Boulton and Paul B. Jackson}, + volume = {2152}, + series = lncs, + month = sep, + publisher = {Springer}, +} + +@InProceedings{Leino:induction, + author = {K. Rustan M. Leino}, + title = {Automating Induction with an {SMT} Solver}, + booktitle = {VMCAI 2012}, + pages = {315-331}, + year = {2012}, + volume = {7148}, + series = lncs, + month = jan, + publisher = {Springer}, +} + +@InProceedings{LGLM:BVD, + author = {Le Goues, Claire and K. Rustan M. 
Leino and Micha{\l} Moskal}, + title = {The {B}oogie {V}erification {D}ebugger (Tool Paper)}, + booktitle = {Software Engineering and Formal Methods --- 9th International Conference, SEFM 2011}, + pages = {407-414}, + year = {2011}, + editor = {Gilles Barthe and Alberto Pardo and Gerardo Schneider}, + volume = {7041}, + series = lncs, + month = nov, + publisher = {Springer}, +} + +@InProceedings{Filliatre:2lines, + author = {Jean-Christophe Filli{\^a}tre}, + title = {Verifying two lines of {C} with {Why3}: an exercise in + program verification}, + booktitle = {Verified Software: Theories, Tools, Experiments --- + 4th International Conference, VSTTE 2012}, + pages = {83-97}, + year = {2012}, + editor = {Rajeev Joshi and Peter M{\"u}ller and Andreas Podelski}, + volume = {7152}, + series = lncs, + month = jan, + publisher = {Springer}, +} + +@InCollection{LeinoMoskal:UsableProgramVerification, + author = {K. Rustan M. Leino and Micha{\l} Moskal}, + title = {Usable Auto-Active Verification}, + booktitle = {UV10 (Usable Verification) workshop}, + year = {2010}, + editor = {Tom Ball and Lenore Zuck and N. Shankar}, + month = nov, + publisher = {\url{http://fm.csl.sri.com/UV10/}}, +} + +@InProceedings{LeinoMonahan:Comprehensions, + author = {K. Rustan M. Leino and Rosemary Monahan}, + title = {Reasoning about Comprehensions with First-Order {SMT} Solvers}, + booktitle = {Proceedings of the 2009 ACM Symposium on Applied Computing (SAC)}, + editor = {Sung Y. Shin and Sascha Ossowski}, + publisher = {ACM}, + month = mar, + year = 2009, + pages = {615-622}, +} + +@TechReport{VeriFast:TR, + author = {Bart Jacobs and Frank Piessens}, + title = {The {VeriFast} program verifier}, + institution = {Dept. of Computer Science, Katholieke Universiteit Leuven}, + year = {2008}, + number = {CW-520}, +} + +@book{DijkstraScholten:book, + author = "Edsger W. Dijkstra and Carel S. 
Scholten", + title = "Predicate Calculus and Program Semantics", + publisher = "Springer-Verlag", + series = "Texts and Monographs in Computer Science", + year = 1990 +} + +@Book{Coq:book, + author = {Yves Bertot and Pierre Cast{\'e}ran}, + title = {{C}oq'{A}rt: The Calculus of Inductive Constructions}, + publisher = {Springer}, + year = {2004}, + series = {Texts in Theoretical Comp. Sci.}, +} + +@Book{ACL2:book, + author = {Matt Kaufmann and Panagiotis Manolios and J Strother Moore}, + title = {Computer-Aided Reasoning: An Approach}, + publisher = {Kluwer Academic Publishers}, + year = {2000}, +} + +@InProceedings{Coq:Coinduction, + author = {Eduardo Gim{\'e}nez}, + title = {An Application of Co-inductive Types in {Coq}: Verification of the Alternating Bit Protocol}, + booktitle = {Types for Proofs and Programs, International Workshop TYPES'95}, + pages = {135-152}, + year = {1996}, + editor = {Stefano Berardi and Mario Coppo}, + volume = 1158, + series = lncs, + publisher = {Springer}, +} + +@InCollection{JacobsRutten:IntroductionCoalgebra, + author = {Bart Jacobs and Jan Rutten}, + title = {An Introduction to (Co)Algebra and (Co)Induction}, + booktitle = {Advanced Topics in Bisimulation and Coinduction}, + series = {Cambridge Tracts in Theoretical Comp. Sci.}, + number = {52}, + publisher = {Cambridge Univ. 
Press}, + year = {2011}, + pages = {38-99}, +} + +@InProceedings{SonnexEtAl:Zeno, + author = {William Sonnex and Sophia Drossopoulou and Susan Eisenbach}, + title = {Zeno: An Automated Prover for Properties of Recursive + Data Structures}, + booktitle = {Tools and Algorithms for the Construction and Analysis of + Systems --- 18th International Conference, TACAS 2012}, + editor = {Cormac Flanagan and Barbara K{\"o}nig}, + volume = {7214}, + series = lncs, + year = {2012}, + month = mar # "--" # apr, + publisher = {Springer}, + pages = {407-421}, +} + +@InProceedings{JohanssonEtAl:IPT2010, + author = {Moa Johansson and Lucas Dixon and Alan Bundy}, + title = {Case-Analysis for {R}ippling and Inductive Proof}, + booktitle = {Interactive Theorem Proving, First International Conference, ITP 2010}, + editor = {Matt Kaufmann and Lawrence C. Paulson}, + volume = {6172}, + series = lncs, + publisher = {Springer}, + month = jul, + year = {2010}, + pages = {291-306}, +} + +@Article{HatcliffEtAl:BISL, + author = {John Hatcliff and Gary T. Leavens and + K. Rustan M. Leino and Peter M{\"u}ller and Matthew Parkinson}, + title = {Behavioral interface specification languages}, + journal = {ACM Computing Surveys}, + volume = {44}, + number = {3}, + note = {Article 16}, + month = jun, + year = {2012}, +} + +@InProceedings{BoehmeNipkow:Sledgehammer, + author = {Sascha B{\"o}hme and Tobias Nipkow}, + title = {Sledgehammer: {J}udgement {D}ay}, + booktitle = {Automated Reasoning, 5th International Joint Conference, IJCAR 2010}, + editor = {J{\"u}rgen Giesl and Reiner H{\"a}hnle}, + year = {2010}, + pages = {107-121}, + volume = {6173}, + series = lncs, + month = jul, + publisher = {Springer}, +} + +@InProceedings{Dafny:LASER2011, + author = {Luke Herbert and K. Rustan M. 
Leino and Jose Quaresma}, + title = {Using {Dafny}, an Automatic Program Verifier}, + booktitle = {Tools for Practical Software Verification, {LASER}, International Summer School 2011}, + editor = {Bertrand Meyer and Martin Nordio}, + volume = {7682}, + series = lncs, + year = {2012}, + pages = {156-181}, + publisher = {Springer}, +} + +@Article{Leroy:CompCert:CACM, + author = {Xavier Leroy}, + title = {Formal verification of a realistic compiler}, + journal = cacm, + volume = {52}, + number = {7}, + year = {2009}, + pages = {107-115}, +} + +@InProceedings{Leino:ITP2013, + author = {K. Rustan M. Leino}, + title = {Automating Theorem Proving with {SMT}}, + booktitle = {ITP 2013}, + year = {2013}, + volume = {7998}, + series = lncs, + pages = {2-16}, + month = jul, + publisher = {Springer}, +} + +@techreport{Nelson:thesis, + author = "Charles Gregory Nelson", + title = "Techniques for Program Verification", + institution = "Xerox PARC", + month = jun, + year = 1981, + number = "CSL-81-10", + note = "The author's PhD thesis" +} + +@InProceedings{LernerMillsteinChambers:VerifiedOptimizations, + author = {Sorin Lerner and Todd Millstein and Craig Chambers}, + title = {Automatically proving the correctness of compiler optimizations}, + booktitle = {Proceedings of the ACM SIGPLAN 2003 Conference on + Programming Language Design and Implementation 2003}, + year = {2003}, + editor = {Ron Cytron and Rajiv Gupta}, + pages = {220-231}, + month = jun, + publisher = {ACM}, +} + +@InProceedings{BoyerHunt:ACL2, + author = {Robert S. Boyer and Hunt, Jr., Warren A.}, + title = {Function Memoization and Unique Object Representation for {ACL2} Functions}, + booktitle = {Proceedings of the Sixth International Workshop on + the ACL2 Theorem Prover and its Applications, ACL2 2006}, + editor = {Panagiotis Manolios and Matthew Wilding}, + month = aug, + year = {2006}, + pages = {81--89}, + publisher = {ACM}, +} + +@inproceedings{LeinoWuestholz:DafnyIDE, + author = {K. Rustan M. 
Leino and + Valentin W{\"{u}}stholz}, + title = {The {D}afny Integrated Development Environment}, + booktitle = {Proceedings 1st Workshop on Formal Integrated Development Environment, + {F-IDE} 2014}, + month = apr, + year = {2014}, + pages = {3--15}, + editor = {Catherine Dubois and + Dimitra Giannakopoulou and + Dominique M{\'{e}}ry}, + series = {{EPTCS}}, + volume = {149}, +} + +@inproceedings{BarnettLeino:Weakest, + author = {Mike Barnett and K. Rustan M. Leino}, + title = {Weakest-precondition of unstructured programs}, + booktitle = {Proceedings of the 2005 ACM SIGPLAN-SIGSOFT Workshop on + Program Analysis For Software Tools and Engineering, + PASTE'05}, + editor = {Michael D. Ernst and Thomas P. Jensen}, + month = sep, + year = {2005}, + pages = {82-87}, + publisher = {ACM}, +} + +@InProceedings{AutoProof:TACAS2015, + author = {Julian Tschannen and Carlo A. Furia and Martin Nordio and Nadia Polikarpova}, + title = {{AutoProof}: Auto-Active Functional Verification of Object-Oriented Programs}, + booktitle = {Tools and Algorithms for the Construction and + Analysis of Systems --- 21st International Conference, + TACAS 2015}, + OPTyear = {2015}, + editor = {Christel Baier and Cesare Tinelli}, + volume = {9035}, + series = lncs, + pages = {566-580}, + month = apr, + publisher = {Springer}, +} + +@Article{Doyle:TMS, + author = {Jon Doyle}, + title = {A Truth Maintenance System}, + journal = {Artificial Intelligence}, + year = {1979}, + month = nov, + volume = {12}, + number = {3}, + pages = {231-272}, +} + +@InProceedings{LeinoMueller:SpecSharp:Tutorial, + author = {K. Rustan M. Leino and Peter M{\"u}ller}, + title = {Using the {Spec\#} Language, Methodology, and Tools to Write Bug-Free Programs}, + booktitle = {LASER Summer School 2007/2008}, + editor = {Peter M{\"u}ller}, + series = lncs, + volume = 6029, + year = 2010, + publisher = {Springer}, + pages = {91-139}, +} + +@inproceedings{TFNP-TACAS15, + author = {Julian Tschannen and Carlo A. 
Furia and Martin Nordio and Nadia Polikarpova}, + title = {{AutoProof}: Auto-active Functional Verification of Object-oriented Programs}, + booktitle = {Tools and Algorithms for the Construction and Analysis of Systems --- 21st International Conference, TACAS 2015}, + editor = {Christel Baier and Cesare Tinelli}, + series = lncs, + volume = {9035}, + month = apr, + year = {2015}, + publisher = {Springer}, + pages = {566-580}, +} + +@inproceedings{PTFM-FM14, + author = {Nadia Polikarpova and Julian Tschannen and Carlo A. Furia and Bertrand Meyer}, + title = {Flexible Invariants Through Semantic Collaboration}, + booktitle = {FM 2014}, + series = lncs, + volume = {8442}, + publisher = {Springer}, + month = may, + year = {2014}, + pages = {514-530} +} + +@InProceedings{VeriFast:Java:tutorial, + author = {Jan Smans and Bart Jacobs and Frank Piessens}, + title = {{VeriFast} for {J}ava: A Tutorial}, + booktitle = {Aliasing in Object-Oriented Programming. Types, Analysis and Verification}, + year = {2013}, + editor = {Dave Clarke and James Noble and Tobias Wrigstad}, + volume = {7850}, + series = lncs, + pages = {407-442}, + publisher = {Springer}, +} + +@InProceedings{Traits:ECOOP2003, + author = {Nathanael Sch{\"a}rli and St{\'e}phane Ducasse and Oscar Nierstrasz and Andrew P. Black}, + title = {Traits: Composable Units of Behaviour}, + booktitle = {ECOOP 2003 --- Object-Oriented Programming, 17th European Conference}, + editor = {Luca Cardelli}, + series = lncs, + volume = {2743}, + pages = {248-274}, + month = jul, + year = {2003}, + publisher = {Springer}, +} + +@Article{Traits:logic, + author = {Ferruccio Damiani and Johan Dovland and Einar Broch Johnsen and Ina Schaefer}, + title = {Verifying traits: an incremental proof system for fine-grained reuse}, + journal = {Formal Aspects of Computing}, + volume = {26}, + number = {4}, + pages = {761-793}, + month = jul, + year = {2014}, +} + +@inproceedings{LeinoPolikarpova:calc, + author = {K. Rustan M. 
Leino and + Nadia Polikarpova}, + title = {Verified Calculations}, + booktitle = {VSTTE 2013}, + series = lncs, + volume = 8164, + year = {2014}, + pages = {170-190}, + publisher = {Springer}, +} + +@Article{LeinoYessenov:ChaliceRefinement, + author = {K. Rustan M. Leino and Kuat Yessenov}, + title = {Stepwise refinement of heap-manipulating code in {C}halice}, + journal = {Formal Aspects of Computing}, + year = {2012}, + volume = {24}, + number = {4--6}, + pages = {519--535}, + month = jul, +} + +@article{Wirth:StepwiseRefinment, + author = "N. Wirth", + title = "{Program Development by Stepwise Refinement}", + journal = cacm, + volume = 14, + year = 1971, + pages = "221-227" +} + +@article{Dijkstra:Refinement, + author = "E. W. Dijkstra", + title = "A constructive approach to the problem of program correctness", + journal = "BIT", + volume = 8, + year = 1968, + pages = "174-186" +} + +@phdthesis{Back:thesis, + author = "R.-J. R. Back", + title = "On the Correctness of Refinement Steps in Program Development", + school = "University of Helsinki", + year = 1978, + note = "Report A-1978-4" +} + +@article{Morgan:SpecStmt, + author = "Carroll Morgan", + title = "The Specification Statement", + journal = toplas, + volume = 10, + number = 3, + year = 1988, + month = jul, + pages = "403-419" +} + +@book{Morgan:book, + author = "Carroll Morgan", + title = "Programming from Specifications", + publisher = "Prentice-Hall International", + series = "Series in Computer Science", + year = 1990 +} + +@article{Morris:Refinement, + author = "Joseph M. 
Morris", + title = "A theoretical basis for stepwise refinement and the + programming calculus", + journal = scp, + volume = 9, + number = 3, + month = dec, + year = 1987, + pages = "287-306" +} + +@article{GriesVolpano:Transform, + author = "David Gries and Dennis Volpano", + title = "The Transform --- a New Language Construct", + journal = "Structured Programming", + volume = 11, + number = 1, + year = 1990, + pages = "1-10" +} + +@Book{Abrial:BBook, + author = "J.-R. Abrial", + title = "The {B}-Book: Assigning Programs to Meanings", + publisher = "Cambridge University Press", + year = 1996 +} + +@Book{Jones:VDM:book, + Author = "Cliff B. Jones", + Title = "Systematic Software Development Using {VDM}", + Publisher = "Prentice Hall", + Series = "International Series in Computer Science", + Address = "Englewood Cliffs, N.J.", + Edition = "Second", + Year = 1990 +} + +@Book{Abrial:EventB:book, + author = {Jean-Raymond Abrial}, + title = {Modeling in {Event-B}: System and Software Engineering}, + publisher = {Cambridge University Press}, + year = {2010}, +} + +@Misc{ClearSy:AtelierB, + author = {ClearSy}, + title = {Atelier {B}}, + howpublished = {\url{http://www.atelierb.eu/}}, +} + +@InProceedings{Abrial:FM-in-practice, + author = {Jean-Raymond Abrial}, + title = {Formal methods in industry: achievements, problems, future}, + booktitle = {28th International Conference on Software Engineering (ICSE 2006)}, + editor = {Leon J. Osterweil and H. Dieter Rombach and Mary Lou Soffa}, + month = may, + year = {2006}, + publisher = {ACM}, + pages = {761-768}, +} + +@InProceedings{MartinEtAl:AsynchMIPS, + author = {Alain J. Martin and Andrew Lines and Rajit Manohar + and Mika Nystr{\"o}m and Paul I. 
P{\'e}nzes and + Robert Southworth and Uri Cummings}, + title = {The Design of an Asynchronous {MIPS} {R3000} Microprocessor}, + booktitle = {17th Conference on Advanced Research in VLSI {ARVLSI '97}}, + month = sep, + year = {1997}, + publisher = {IEEE Computer Society}, + pages = {164-181}, +} + +@Book{Abrial:EventB-book, + author = {Jean-Raymond Abrial}, + title = {Modeling in {Event-B}: System and Software Engineering}, + publisher = {Cambridge University Press}, + year = {2010}, +} + +@Article{BackSere:ActionSystems, + author = {Ralph-Johan Back and Kaisa Sere}, + title = {Stepwise Refinement of Action Systems}, + journal = {Structured Programming}, + year = {1991}, + volume = {12}, + number = {1}, + pages = {17-30}, +} + +@InProceedings{VCC:overview, + author = {Ernie Cohen and Markus Dahlweid and Mark Hillebrand and Dirk Leinenbach and + Micha{\l} Moskal and Thomas Santen and Wolfram Schulte and Stephan Tobies}, + title = {{VCC}: A Practical System for Verifying Concurrent {C}}, + booktitle = {Theorem Proving in Higher Order Logics, 22nd International Conference, TPHOLs 2009}, + editor = {Stefan Berghofer and Tobias Nipkow and Christian Urban and Makarius Wenzel}, + volume = {5674}, + series = LNCS, + publisher = {Springer}, + month = aug, + year = {2009}, + pages = {23-42}, +} + +@InProceedings{BallEtAll:ScalableChecking, + author = {Thomas Ball and Brian Hackett and Shuvendu K. Lahiri + and Shaz Qadeer and Julien Vanegue}, + title = {Towards Scalable Modular Checking of User-Defined Properties}, + booktitle = {Verified Software: Theories, Tools, Experiments, + (VSTTE 2010)}, + editor = {Gary T. Leavens and Peter O'Hearn and Sriram K. Rajamani}, + volume = {6217}, + series = lncs, + publisher = {Springer}, + month = aug, + year = {2010}, + pages = {1-24}, +} + +@techreport{ESC:rr, + author = "David L. Detlefs and K. Rustan M. Leino and Greg Nelson + and James B. 
Saxe", + title = "Extended static checking", + institution = "Compaq Systems Research Center", + month = dec, + year = 1998, + type = "Research Report", + number = 159 +} + +@InProceedings{VeanesEtAl:SpecExplorer, + author = {Margus Veanes and Colin Campbell and Wolfgang + Grieskamp and Wolfram Schulte and Nikolai Tillmann + and Lev Nachmanson}, + title = {Model-Based Testing of Object-Oriented Reactive + Systems with {Spec} {Explorer}}, + booktitle = {Formal Methods and Testing}, + pages = {39-76}, + year = {2008}, + editor = {Robert M. Hierons and Jonathan P. Bowen and Mark Harman}, + volume = {4949}, + series = lncs, + publisher = {Springer}, +} + +@article{Hoare:DataRepresentations, + author = "C. A. R. Hoare", + title = "Proof of correctness of data representations", + journal = acta, + volume = 1, + number = 4, + year = 1972, + pages = "271-281" +} + +@manual{baudin09acsl, + title = {{ACSL}: {ANSI}/{ISO} {C} Specification Language, version 1.4}, + author = {Patrick Baudin and Jean-Christophe Filli{\^a}tre and + Claude March{\'e} and Benjamin Monate and Yannick + Moy and Virgile Prevosto}, + year = 2009, + note = {\url{http://frama-c.com/}} +} + +@InProceedings{BarnettEtAl:Boogie, + author = "Mike Barnett and Bor-Yuh Evan Chang and Robert DeLine and + Bart Jacobs and K. Rustan M. Leino", + title = "{B}oogie: A Modular Reusable Verifier for Object-Oriented Programs", + booktitle = "Formal Methods for Components and Objects: 4th + International Symposium, FMCO 2005", + editor = "de Boer, Frank S. and Marcello M. 
Bonsangue and
+ Susanne Graf and de Roever, Willem-Paul",
+ series = lncs,
+ volume = 4111,
+ publisher = "Springer",
+ month = sep,
+ year = 2006,
+ pages = "364-387"
+}
+
+@inproceedings{deMouraBjorner:Z3:overview,
+ author = "de Moura, Leonardo and Nikolaj Bj{\o}rner",
+ title = {{Z3}: An efficient {SMT} solver},
+ booktitle = {TACAS 2008},
+ series = lncs,
+ volume = 4963,
+ publisher = {Springer},
+ month = mar # "--" # apr,
+ year = 2008,
+ pages = {337-340},
+}
+
+@Article{Back-Mikhajlova-vonWright:ClassRefinement,
+ author = {Ralph-Johan Back and Anna Mikhajlova and von Wright, Joakim},
+ title = {Class Refinement as Semantics of Correct Object Substitutability},
+ journal = {Formal Aspects of Computing},
+ volume = {12},
+ number = {1},
+ year = {2000},
+ month = oct,
+ pages = {18-40},
+}
+
+@InProceedings{MikhajlovaSekerinski:ClassRefinement,
+ author = {Anna Mikhajlova and Emil Sekerinski},
+ title = {Class Refinement and Interface Refinement in Object-Oriented Programs},
+ booktitle = {FME '97: Industrial Applications and Strengthened
+ Foundations of Formal Methods, 4th International
+ Symposium of Formal Methods Europe},
+ editor = {John S. Fitzgerald and Cliff B. Jones and Peter Lucas},
+ volume = {1313},
+ series = lncs,
+ publisher = {Springer},
+ month = sep,
+ year = {1997},
+ pages = {82-101},
+}
+
+@InProceedings{LeinoMueller:ESOP2009,
+ author = {K. Rustan M. Leino and Peter M{\"u}ller},
+ title = {A Basis for Verifying Multi-threaded Programs},
+ booktitle = {Programming Languages and Systems, 18th European
+ Symposium on Programming, ESOP 2009},
+ editor = {Giuseppe Castagna},
+ volume = {5502},
+ series = lncs,
+ publisher = {Springer},
+ month = mar,
+ year = 2009,
+ pages = {378-393},
+}
+
+@InCollection{Chalice:tutorial,
+ author = {K. Rustan M. 
Leino and Peter M{\"u}ller and Jan Smans},
+ title = {Verification of Concurrent Programs with {C}halice},
+ booktitle = {Foundations of Security Analysis and Design {V}: {FOSAD} 2007/2008/2009 Tutorial Lectures},
+ editor = {Alessandro Aldini and Gilles Barthe and Roberto Gorrieri},
+ volume = {5705},
+ series = lncs,
+ publisher = {Springer},
+ year = {2009},
+ pages = {195-222},
+}
+
+@InProceedings{LeinoRuemmer:Boogie2,
+ author = {K. Rustan M. Leino and Philipp R{\"u}mmer},
+ title = {A Polymorphic Intermediate Verification Language:
+ Design and Logical Encoding},
+ booktitle = {Tools and Algorithms for the Construction and
+ Analysis of Systems, 16th International Conference,
+ TACAS 2010},
+ editor = {Javier Esparza and Rupak Majumdar},
+ series = lncs,
+ volume = 6015,
+ publisher = {Springer},
+ month = mar,
+ year = 2010,
+ pages = {312-327},
+}
+
+@book{LiskovGuttag:book,
+ author = "Barbara Liskov and John Guttag",
+ title = "Abstraction and Specification in Program Development",
+ publisher = "MIT Press",
+ series = "MIT Electrical Engineering and Computer Science Series",
+ year = 1986
+}
+
+@TechReport{DahlEtAl:Simula67,
+ author = {Ole-Johan Dahl and Bj{\o}rn Myhrhaug and Kristen Nygaard},
+ title = {Common Base Language},
+ institution = {Norwegian Computing Center},
+ type = {Publication},
+ number = {S-22},
+ month = oct,
+ year = 1970,
+}
+
+@InProceedings{tafat10foveoos,
+ author = {Asma Tafat and Sylvain Boulm\'e and Claude March\'e},
+ title = {A Refinement Methodology for Object-Oriented Programs},
+ booktitle = {Formal Verification of Object-Oriented Software, Papers
+ Presented at the International Conference},
+ editor = {Bernhard Beckert and Claude March\'e},
+ month = jun,
+ year = 2010,
+ pages = {143--159},
+}
+
+@inproceedings{LeinoMueller:ModelFields,
+ author = {K. Rustan M. 
Leino and + Peter M{\"u}ller}, + title = {A Verification Methodology for Model Fields}, + booktitle = "Programming Languages and Systems, 15th European Symposium on Programming, ESOP 2006", + editor = "Peter Sestoft", + series = lncs, + volume = 3924, + publisher = "Springer", + month = mar, + year = 2006, + pages = {115-130}, +} + +@InProceedings{CarterEtAl:UsingPerfectDeveloper, + author = {Gareth Carter and Rosemary Monahan and Joseph M. Morris}, + title = {Software Refinement with {P}erfect {D}eveloper}, + booktitle = {Third IEEE International Conference on Software + Engineering and Formal Methods (SEFM 2005)}, + pages = {363-373}, + editor = {Bernhard K. Aichernig and Bernhard Beckert}, + month = sep, + year = {2005}, + publisher = {IEEE Computer Society}, +} + +@InProceedings{Abrial:SchorrWaite, + author = {Jean-Raymond Abrial}, + title = {Event Based Sequential Program Development: + Application to Constructing a Pointer Program}, + booktitle = {FME 2003: Formal Methods, International Symposium of + Formal Methods Europe}, + editor = {Keijiro Araki and Stefania Gnesi and Dino Mandrioli}, + volume = {2805}, + series = lncs, + publisher = {Springer}, + month = sep, + year = {2003}, + pages = {51-74}, +} + +@article{Barnett-etal04, + author = {Mike Barnett and Robert DeLine and Manuel F{\"a}hndrich and + K. Rustan M. Leino and Wolfram Schulte}, + title = {Verification of Object-Oriented Programs with Invariants}, + journal = {Journal of Object Technology}, + volume = 3, + number = 6, + year = 2004, + pages = {27-56}, +} + +@TechReport{HatcliffEtAl:survey-tr, + author = {John Hatcliff and Gary T. Leavens and K. Rustan M. Leino and + Peter M{\"u}ller and Matthew Parkinson}, + title = {Behavioral Interface Specification Languages}, + institution = {University of Central Florida, School of EECS}, + month = oct, + year = {2010}, + number = {CS-TR-09-01a}, +} + +@Article{HatcliffEtAl:survey:journal:tentativeInfo, + author = {John Hatcliff and Gary T. Leavens and K. 
Rustan M. Leino and + Peter M{\"u}ller and Matthew Parkinson}, + title = {Behavioral Interface Specification Languages}, + journal = {ACM Computing Surveys}, + year = {2012}, + volume = {44}, + number = {3}, + month = may, +} + +@inproceedings{Boyland:SAS2003, + author = {John Boyland}, + title = {Checking Interference with Fractional Permissions}, + booktitle = "Static Analysis, 10th International Symposium, SAS 2003", + editor = {Radhia Cousot}, + series = lncs, + volume = 2694, + publisher = "Springer", + year = 2003, + pages = {55-72} +} + +@InProceedings{SmansEtAl:ImplicitDynamicFrames, + author = {Jan Smans and Bart Jacobs and Frank Piessens}, + title = {Implicit Dynamic Frames: Combining Dynamic Frames + and Separation Logic}, + booktitle = {ECOOP 2009 --- Object-Oriented Programming, 23rd + European Conference}, + editor = {Sophia Drossopoulou}, + volume = {5653}, + series = lncs, + publisher = {Springer}, + month = jul, + year = {2009}, + pages = {148-172}, +} + +@Misc{Escher, + author = "{Escher Technologies, Inc.}", + title = "Getting started with {P}erfect", + howpublished = "\url{http://www.eschertech.com}", + year = 2001 +} + +@Article{LeinoNelson:tome, + author = "K. Rustan M. Leino and Greg Nelson", + title = "Data abstraction and information hiding", + journal = toplas, + month = sep, + year = 2002, + volume = 24, + number = 5, + pages = "491-553" +} + +@InProceedings{Clarke-Drossopoulou02, + author = {Dave Clarke and Sophia Drossopoulou}, + title = {Ownership, encapsulation and the disjointness of + type and effect}, + booktitle = {Proceedings of the 2002 ACM SIGPLAN Conference on + Object-Oriented Programming Systems, Languages and + Applications, OOPSLA 2002}, + publisher = {ACM}, + Month = nov, + Year = 2002, + pages = {292--310}, +} + +@InProceedings{Reynolds:SepLogic, + author = {John C. 
Reynolds},
+ title = {Separation Logic: A Logic for Shared Mutable Data Structures},
+ booktitle = {17th IEEE Symposium on Logic in Computer Science (LICS 2002)},
+ publisher = {IEEE Computer Society},
+ year = {2002},
+ month = jul,
+ pages = {55-74},
+}
+
+# References supplied by the reviewers.
+# Added 12/20/11.
+
+@incollection{Potet:BComposition,
+ author = {Potet, Marie and Rouzaud, Yann},
+ affiliation = {LSR-IMAG Grenoble France},
+ title = {Composition and refinement in the B-method},
+ booktitle = {B'98: Recent Advances in the Development and Use of the B Method},
+ series = lncs,
+ editor = {Bert, Didier},
+ publisher = {Springer Berlin / Heidelberg},
+ isbn = {978-3-540-64405-7},
+ keyword = {Computer Science},
+ pages = {46-65},
+ volume = {1393},
+ url = {http://dx.doi.org/10.1007/BFb0053355},
+ note = {10.1007/BFb0053355},
+ year = {1998}
+}
+
+@inproceedings{Grandy:JavaRefinement,
+ author = {Grandy, Holger and Stenzel, Kurt and Reif, Wolfgang},
+ title = {A refinement method for {J}ava programs},
+ booktitle = {Formal Methods for Open Object-Based Distributed Systems, 9th IFIP WG 6.1 International Conference, FMOODS 2007},
+ editor = {Marcello M. 
Bonsangue and Einar Broch Johnsen}, + series = lncs, + number = {4468}, + month = jun, + year = {2007}, + publisher = {Springer}, + pages = {221--235}, +} + +@inproceedings{Wehrheim:Subtypes, + author = {Heike Wehrheim}, + title = {Checking Behavioural Subtypes via Refinement}, + booktitle = {FMOODS}, + year = {2002}, + pages = {79-93}, + crossref = {DBLP:conf/fmoods/2002}, + bibsource = {DBLP, http://dblp.uni-trier.de} +} + +@article{Banerjee:ownership, + author = {Banerjee, Anindya and Naumann, David A.}, + title = {Ownership confinement ensures representation independence for object-oriented programs}, + journal = jacm, + volume = {52}, + issue = {6}, + month = {November}, + year = {2005}, + issn = {0004-5411}, + pages = {894--960}, + numpages = {67}, + url = {http://doi.acm.org/10.1145/1101821.1101824}, + doi = {http://doi.acm.org/10.1145/1101821.1101824}, + acmid = {1101824}, + publisher = {ACM}, + address = {New York, NY, USA}, + keywords = {Alias control, confinement, data refinement, relational parametricity, simulation}, +} + +@Article{SpecSharp:Retrospective:CACM, + author = {Mike Barnett and Manuel F{\"a}hndrich and + K. Rustan M. Leino and Peter M{\"u}ller and + Wolfram Schulte and Herman Venter}, + title = {Specification and Verification: The {Spec\#} Experience}, + journal = cacm, + volume = {54}, + number = {6}, + pages = {81-91}, + month = jun, + year = 2011, +} + +@InProceedings{Heule:FractionsWithoutFractions, + author = {Stefan Heule and K. Rustan M. Leino and Peter + M{\"u}ller and Alexander J. Summers}, + title = {Fractional Permissions without the Fractions}, + booktitle = {13th Workshop on Formal Techniques for Java-like + Programs, FTfJP 2011}, + year = {2011}, + month = jul, +} + +@incollection{Morgan:Capjunctive, + author = "Carroll Morgan", + title = "The Cuppest Capjunctive Capping, and {G}alois", + editor = "A. W. Roscoe", + booktitle = "A Classical Mind: Essays in Honour of C.A.R. 
Hoare", + publisher = "Prentice-Hall", + series = "International Series in Computer Science", + pages = "317-332", + year = 1994 +} + +@Article{Morgan:CompositionalNoninterference, + author = {Carroll Morgan}, + title = {Compositional noninterference from first principles}, + journal = fac, + year = {2012}, + volume = {24}, + number = {1}, + pages = {3-26}, +} + +@article{DenningDenning:Certification, + author = "Dorothy E. Denning and Peter J. Denning", + title = "Certification of Programs for Secure Information Flow", + journal = cacm, + volume = 20, + number = 7, + month = jul, + year = 1977, + pages = "504-513" +} + +@article{Jones:Interference, + author = "C. B. Jones", + title = "Accommodating interference in the formal design of + concurrent object-based programs", + journal = "Formal Methods in System Design", + volume = 8, + number = 2, + pages = "105-122", + month = mar, + year = 1996 +} + +@Book{Jackson:Alloy:book, + author = {Daniel Jackson}, + title = {Software Abstractions: Logic, Language, and Analysis}, + publisher = {MIT Press}, + year = {2006}, +} + +@inproceedings{LeuschelButler:FME03, + author = {Michael Leuschel and Michael Butler}, + title = {Pro{B}: A Model Checker for {B}}, + booktitle = {FME 2003: Formal Methods}, + editor = {Araki, Keijiro and Gnesi, Stefania and Mandrioli, Dino}, + publisher = {Springer}, + series = lncs, + number = {2805}, + year = 2003, + pages = {855-874}, +} + +@InProceedings{ParkinsonBierman:POPL2005, + author = {Matthew J. Parkinson and Gavin M. Bierman}, + title = {Separation logic and abstraction}, + booktitle = {Proceedings of the 32nd ACM SIGPLAN-SIGACT Symposium + on Principles of Programming Languages, POPL 2005}, + publisher = {ACM}, + month = jan, + year = {2005}, + pages = {247-258}, +} + +@Article{LiskovWing94, + author = "Barbara Liskov and Jeannette M. 
Wing", + title = "A Behavioral Notion of Subtyping", + journal = toplas, + year = 1994, + volume = 16, + number = 6 +} + +@book{WoodcockDavies:UsingZ, + title = "Using {Z}: Specification, Refinement, and Proof", + author = "Jim Woodcock and Jim Davies", + year = "1996", + publisher = "Prentice Hall International", +} + +@Article{Leavens:ModularOOSpecs, + author = {Gary T. Leavens}, + title = {Modular Specification and Verification of Object-Oriented Programs}, + journal = {IEEE Software}, + year = {1991}, + volume = {8}, + number = {4}, + pages = {72-80}, +} + +@InProceedings{ShieldHayes:InvsAndDynConstraints, + author = {Jamie Shield and Ian J. Hayes}, + title = {Refining Object-Oriented Invariants and Dynamic Constraints}, + booktitle = {9th Asia-Pacific Software Engineering Conference (APSEC 2002)}, + pages = {52-61}, + year = {2002}, + publisher = {IEEE Computer Society}, +} + +@TechReport{Chalice:predicates:TR, + author = {S. Heule and I. T. Kassios and P. M\"uller and A. J. Summers}, + title = {Verification Condition Generation for Permission Logics with Abstraction Functions}, + institution = {ETH Zurich}, + year = {2012}, + number = {761} +} + +@InCollection{KleinEtAl:DataRefinement, + author = {Gerwin Klein and Thomas Sewell and Simon Winwood}, + title = {Refinement in the formal verification of {seL4}}, + booktitle = {Design and Verification of Microprocessor Systems for High-Assurance Applications}, + editor = {David S. Hardin}, + pages = {323--339}, + month = Mar, + year = {2010}, + publisher = {Springer}, +} + +@InProceedings{DharaLeavens:forcing, + author = {Krishna Kishore Dhara and Gary T. Leavens}, + title = {Forcing Behavioral Subtyping through Specification Inheritance}, + booktitle = {18th International Conference on Software Engineering}, + year = {1996}, + editor = {H. Dieter Rombach and T. S. E. Maibaum and Marvin V. 
Zelkowitz}, + pages = {258-267}, + month = mar, + publisher = {IEEE Computer Society}, +} + +@InProceedings{SpiritOfGhostCode, + author = {Jean-Christophe Filli{\^a}tre and L{\'e}on Gondelman and Andrei Paskevich}, + title = {The Spirit of Ghost Code}, + booktitle = {Computer Aided Verification --- 26th International Conference, CAV 2014}, + year = {2014}, + editor = {Armin Biere and Roderick Bloem}, + series = lncs, + volume = {8559}, + pages = {1-16}, + month = jul, + publisher = {Springer}, +} + +@InProceedings{Dafny:traits, + author = {Reza Ahmadi and K. Rustan M. Leino and Jyrki Nummenmaa}, + title = {Automatic Verification of {D}afny Programs with Traits}, + booktitle = {Formal Techniques for {J}ava-like Programs, FTfJP 2015}, + year = {2015}, + editor = {Rosemary Monahan}, + publisher = {ACM}, +} + +@InProceedings{Dafny:Cloudmake, + author = {Maria Christakis and K. Rustan M. Leino and Wolfram Schulte}, + title = {Formalizing and Verifying a Modern Build Language}, + booktitle = {FM 2014: Formal Methods --- 19th International Symposium}, + year = {2014}, + editor = {Cliff B. Jones and Pekka Pihlajasaari and Jun Sun}, + volume = {8442}, + series = lncs, + pages = {643-657}, + month = may, + publisher = {Springer}, +} + +@Misc{Leino:SPLASH2012:keynote, + author = {K. Rustan M. Leino}, + title = {Staged Program Development}, + howpublished = {SPLASH 2012 keynote}, + note = {InfoQ video, \url{http://www.infoq.com/presentations/Staged-Program-Development}}, + month = oct, + year = {2012}, +} + +@article{Parnas:secret, + author = "D. L. 
Parnas", + title = "On the criteria to be used in decomposing systems into modules", + journal = cacm, + volume = 15, + number = 12, + month = dec, + year = 1972, + pages = "1053-1058", + note = "Reprinted as {\tt www.acm.org/classics/may96/}" +} + +@InProceedings{KIV:overview, + author = {Wolfgang Reif}, + title = {The {KIV} System: Systematic Construction of Verified Software}, + booktitle = {Automated Deduction --- CADE-11, 11th International Conference + on Automated Deduction}, + editor = {Deepak Kapur}, + series = lncs, + volume = {607}, + publisher = {Springer}, + pages = {753-757}, + month = jun, + year = {1992}, +} + +@InProceedings{LeinoMoskal:Coinduction, + author = {K. Rustan M. Leino and Micha{\l} Moskal}, + title = {Co-induction Simply --- Automatic Co-inductive Proofs in a Program Verifier}, + booktitle = {FM 2014}, + series = lncs, + volume = {8442}, + publisher = {Springer}, + month = may, + year = {2014}, + pages = {382-398}, +} + +@Article{Tarski:theorem, + author = "Alfred Tarski", + title = "A lattice-theoretical fixpoint theorem and its applications", + journal = "Pacific Journal of Mathematics", + year = 1955, + volume = 5, + pages = "285-309" +} + +@TechReport{KozenSilva:Coinduction, + author = {Dexter Kozen and Alexandra Silva}, + title = {Practical coinduction}, + institution = {Comp. and Inf. Science, Cornell Univ.}, + year = {2012}, + number = {\url{http://hdl.handle.net/1813/30510}}, +} + +@book{Milner:CCS, + author = "Robin Milner", + title = {A Calculus of Communicating Systems}, + year = {1982}, + publisher = {Springer}, +} + +@Book{NipkowKlein:ConcreteSemantics, + author = {Tobias Nipkow and Gerwin Klein}, + title = {Concrete Semantics with {I}sabelle/{HOL}}, + publisher = {Springer}, + year = {2014}, +} + +@Book{Pierce:SoftwareFoundations, + author = {Benjamin C. 
Pierce and Chris Casinghino and + Marco Gaboardi and Michael Greenberg and + C{\u{a}}t{\u{a}}lin Hri\c{t}cu and Vilhelm Sj{\"o}berg and + Brent Yorgey}, + title = {Software Foundations}, + publisher = {\url{http://www.cis.upenn.edu/~bcpierce/sf}}, + year = {2015}, + edition = {version 3.2}, + month = jan, +} + +@InProceedings{ClochardEtAl:SemanticsInWhy3, + author = {Martin Clochard and Jean-Christophe + Filli\^atre and Claude March\'e and Andrei Paskevich}, + title = {Formalizing Semantics with an Automatic Program Verifier}, + booktitle = {VSTTE 2014}, + volume = {8471}, + series = lncs, + publisher = {Springer}, + month = jul, + year = {2014}, + pages = {37--51}, +} + +@Article{LeroyGrall:CoinductiveBigStep, + author = {Xavier Leroy and Herv\'e Grall}, + title = {Coinductive big-step operational semantics}, + journal = {Information and Computation}, + volume = {207}, + number = {2}, + pages = {284-304}, + month = feb, + year = {2009}, +} + +@InProceedings{SwamyEtAl:Fstar2011, + author = {Nikhil Swamy and Juan Chen and C{\'e}dric Fournet and + Pierre-Yves Strub and Karthikeyan Bhargavan and Jean Yang}, + title = {Secure distributed programming with value-dependent types}, + booktitle = {ICFP 2011}, + publisher = {ACM}, + month = sep, + year = {2011}, + pages = {266-278}, +} + +@Book{Nipkow-Paulson-Wenzel02, + author = {Tobias Nipkow and Lawrence C. 
Paulson and Markus Wenzel}, + title = {{Isabelle/HOL} --- A Proof Assistant for Higher-Order Logic}, + publisher = {Springer}, + year = 2002, + volume = 2283, + series = LNCS, +} + +@InProceedings{BoveDybjerNorell:BriefAgda, + author = {Ana Bove and Peter Dybjer and Ulf Norell}, + title = {A Brief Overview of {A}gda --- A Functional Language with Dependent Types}, + booktitle = {TPHOLs 2009}, + series = lncs, + volume = {5674}, + publisher = {Springer}, + month = aug, + year = {2009}, + pages = {73-78}, +} + +@InProceedings{PaulinMohring:InductiveCoq, + author = {Christine Paulin-Mohring}, + title = {Inductive Definitions in the system {C}oq --- Rules and Properties}, + booktitle = {TLCA '93}, + series = lncs, + volume = {664}, + pages = {328-345}, + year = {1993}, + publisher = {Springer}, +} + +@TechReport{CamilleriMelham:InductiveRelations, + author = {Juanito Camilleri and Tom Melham}, + title = {Reasoning with Inductively Defined Relations + in the {HOL} Theorem Prover}, + institution = {University of Cambridge Computer Laboratory}, + year = {1992}, + number = {265}, + OPTmonth = aug, +} + +@Book{Winskel:FormalSemantics, + author = {Glynn Winskel}, + title = {The Formal Semantics of Programming Languages: An Introduction}, + publisher = {MIT Press}, + year = {1993}, +} + +@inproceedings{Paulson:CADE1994, + author = {Lawrence C. Paulson}, + title = {A Fixedpoint Approach to Implementing (Co)Inductive Definitions}, + booktitle = {CADE-12}, + editor = {Alan Bundy}, + volume = {814}, + series = lncs, + publisher = {Springer}, + year = {1994}, + pages = {148-161}, +} + +@InProceedings{Harrison:InductiveDefs, + author = {John Harrison}, + title = {Inductive Definitions: Automation and Application}, + booktitle = {TPHOLs 1995}, + year = {1995}, + editor = {E. Thomas Schubert and Phillip J. 
Windley and Jim Alves-Foss}, + volume = {971}, + series = lncs, + pages = {200-213}, + publisher = {Springer}, +} + +@Article{ManoliosMoore:PartialFunctions, + author = {Panagiotis Manolios and J Strother Moore}, + title = {Partial Functions in {ACL2}}, + journal = {Journal of Automated Reasoning}, + year = {2003}, + volume = {31}, + number = {2}, + pages = {107-127}, +} + +@PhdThesis{Krauss:PhD, + author = {Alexander Krauss}, + title = {Automating Recursive Definitions and Termination Proofs in Higher-Order Logic}, + school = {Technische Universit{\"a}t M{\"u}nchen}, + year = {2009}, +} + diff -Nru dafny-1.9.5/Docs/DafnyRef/madoko.css dafny-1.9.7/Docs/DafnyRef/madoko.css --- dafny-1.9.5/Docs/DafnyRef/madoko.css 1970-01-01 00:00:00.000000000 +0000 +++ dafny-1.9.7/Docs/DafnyRef/madoko.css 2016-06-05 21:11:14.000000000 +0000 @@ -0,0 +1,471 @@ +/* --------------------------------------------------- + Various settings to display madoko elements correctly. + For example, lines in tables or a table of contents. + + All rules use specific madoko classes and never just + a generic element. This means one can safely include + this CSS into any web page without affecting non-madoko + content. 
+----------------------------------------------------*/ + +/* The table of contents */ +.madoko .toc>.tocblock .tocblock .tocblock { + margin-left: 2.5em; +} + +.madoko .toc>.tocblock .tocblock { + margin-left: 1.7em; +} + +.madoko .toc>.tocblock>.tocitem { + font-weight: bold; +} + +.madoko .toc { + margin-top: 1em; +} + +/* Paragraphs */ +.madoko p.para-continue { + margin-bottom: 0pt; +} + +.madoko .para-block+p { + margin-top: 0pt; +} + +.madoko ul.para-block, .madoko ol.para-block { + margin-top: 0pt; + margin-bottom: 0pt; +} + +.madoko ul.para-end, .madoko ol.para-end { + margin-bottom: 1em; +} + +.madoko dl { + margin-left: 0em; +} + +.madoko blockquote { + border-left: 5px Gainsboro solid; + padding-left: 1ex; + margin-left: 1em; +} + +/* Local page links do not get an underline unless hovering */ +.madoko a.localref { + text-decoration: none; +} +.madoko a.localref:hover { + text-decoration: underline; +} + +/* Footnotes */ +.madoko .footnotes { + font-size: smaller; + margin-top: 2em; +} + +.madoko .footnotes hr { + width: 50%; + text-align: left; +} + +.madoko .footnote { + margin-left: 1em; +} +.madoko .footnote-before { + margin-left: -1em; + width: 1em; + display: inline-block; +} + +/* Alignment */ +.madoko .align-center, .madoko .align-center>p { + text-align: center !important; +} + +.madoko .align-center pre { + text-align: left; +} + +.madoko .align-center>* { + margin-left: auto !important; + margin-right: auto !important; +} + +.madoko .align-left, .madoko .align-left>p { + text-align: left !important; +} + +.madoko .align-left>* { + margin-left: 0pt !important; + margin-right: auto !important; +} + +.madoko .align-right, .madoko .align-right>p { + text-align: right !important; +} + +.madoko .align-right>* { + margin-left: auto !important; + margin-right: 0pt !important; +} + +.madoko .align-center>table, +.madoko .align-left>table, +.madoko .align-right>table { + text-align: left !important; +} + + +/* Equations, Figure's etc. 
*/ +.madoko .equation-before { + float: right; +} + + +/* Bibliography */ +.madoko .bibitem { + font-size: smaller; +} + +.madoko .bib-numeric .bibitem { + margin-left: 3em; + text-indent: -3em; +} + +.madoko .bibitem-before { + display: none; +} + +.madoko .bib-numeric .bibitem-before { + display: inline-block; + width: 3em; + text-align: right; +} + +.madoko .bibliography { +} + +.madoko .bibsearch { + font-size: x-small; + text-decoration:none; + color: black; + font-family: "Segoe UI Symbol", Symbola; +} + +/* General */ +.madoko .block, .madoko .figure, .madoko .bibitem, .madoko .equation, .madoko div.math { + margin-top: 1ex; + margin-bottom: 1ex; +} + +.madoko .figure { + padding: 0.5em; + margin-left: 0pt; + margin-right: 0pt; +} + +.madoko .hidden { + display: none; +} + +.madoko .invisible { + visibility: hidden; +} + +.madoko.preview .invisible { + visibility: visible; + opacity: 0.5; +} + +.madoko code.code, .madoko span.code { + white-space: pre-wrap; +} + +.madoko hr, hr.madoko { + border: none; + border-bottom: black solid 1px; + margin-bottom: 0.5ex; +} + +.madoko .framed>*:first-child { + margin-top: 0pt; +} +.madoko .framed>*:last-child { + margin-bottom: 0pt; +} + + +/* Title, authors */ +.madoko .title { + font-size: xx-large; + font-weight: bold; + margin-bottom: 1ex; +} + +.madoko .subtitle { + font-size: x-large; + margin-bottom: 1ex; + margin-top: -1ex; +} + +.madoko .titleblock>* { + margin-left: auto; + margin-right: auto; + text-align: center; +} + +.madoko .titleblock table { + width: 80%; +} + +.madoko .authorblock .author { + font-size: large; +} + +.madoko .titlenote { + margin-top: -0.5ex; + margin-bottom: 1.5ex; +} + +/* Lists */ + +.madoko ul.list-star { + list-style-type: disc; +} + +.madoko ul.list-dash { + list-style-type: none !important; +} + +.madoko ul.list-dash > li:before { + content: "\2013"; + position: absolute; + margin-left: -1em; +} + +.madoko ul.list-plus { + list-style-type: square; +} + +/* Tables */ +.madoko 
table.madoko { + border-collapse: collapse; +} +.madoko td, .madoko th { + padding: 0ex 0.5ex; + margin: 0pt; + vertical-align: top; +} + +.madoko .cell-border-left { + border-left: 1px solid black; +} +.madoko .cell-border-right { + border-right: 1px solid black; +} + + +.madoko thead>tr:first-child>.cell-line, +.madoko tbody:first-child>tr:first-child>.cell-line { + border-top: 1px solid black; + border-bottom: none; +} + +.madoko .cell-line, .madoko .cell-double-line { + border-bottom: 1px solid black; + border-top: none; +} + +.madoko .cell-double-line { + border-top: 1px solid black; + padding-top: 1.5px !important; +} + + +/* Math Pre */ +.madoko .input-mathpre .MathJax_Display { + text-align: left !important; +} + +.madoko div.input-mathpre { + text-align: left; + margin-top: 1.5ex; + margin-bottom: 1ex; +} + +.madoko .math-rendering { + color: gray; +} + +/* Math */ +.madoko .mathdisplay { + text-align: center; +} + + +/*--------------------------------------------------------------------------- + Default style for syntax highlighting +---------------------------------------------------------------------------*/ + +.highlighted { color: black; } +.highlighted .token.identifier { } +.highlighted .token.operators { } +.highlighted .token.keyword { color: blue } +.highlighted .token.string { color: maroon } +.highlighted .token.string.escape { color: gray } +.highlighted .token.comment { color: darkgreen } +.highlighted .token.comment.doc { font-style: normal } +.highlighted .token.constant { color: purple; } +.highlighted .token.entity { } +.highlighted .token.tag { color: blue } +.highlighted .token.info-token { color: black } +.highlighted .token.warn-token { color: black } +.highlighted .token.error-token { color: darkred } +.highlighted .token.debug-token { color: gray } +.highlighted .token.regexp { color: maroon } +.highlighted .token.attribute.name { color: navy } +.highlighted .token.attribute.value { color: maroon } +.highlighted .token.constructor { 
color: purple } +.highlighted .token.namespace { color: navy } +.highlighted .token.header { color: navy } +.highlighted .token.type { color: teal } +.highlighted .token.type.delimiter { color: teal; } +.highlighted .token.predefined { color: navy } +.highlighted .token.invalid { border-bottom: red dotted 1px } +.highlighted .token.code { color: maroon } +.highlighted .token.code.keyword { color: navy } +.highlighted .token.typevar { font-style: italic; } + +.highlighted .token.delimiter { } /* .[curly,square,parenthesis,angle,array,bracket] */ +.highlighted .token.number { } /* .[hex,octal,binary,float] */ +.highlighted .token.variable { } /* .[name,value] */ +.highlighted .token.meta { color: navy } /* .[content] */ + +.highlighted .token.bold { font-weight: bold; } +.highlighted .token.italic { font-style: italic; } + + +/* Pretty formatting of code */ +.madoko pre.pretty, .madoko code.pretty { + font-family: Cambria,Times,Georgia,serif; + font-size: 100%; +} + +.madoko .pretty table { + border-collapse: collapse; +} +.madoko .pretty td { + padding: 0em; +} +.madoko .pretty td.empty { + min-width: 1.5ex; +} +.madoko .pretty td.expander { + width: 100em; +} +.madoko .pretty .token.identifier { font-style: italic } +.madoko .pretty .token.constructor { font-style: italic } + + +/* --------------------------------------------------- + Styling for full documents +----------------------------------------------------*/ +body.madoko, .madoko-body { + font-family: Cambria,"Times New Roman","Liberation Serif","Times",serif; + -webkit-text-size-adjust: 100%; /* so math displays well on mobile devices */ +} + +body.madoko, .madoko-body { + padding: 0em 2em; + max-width: 88ex; /* about 88 characters */ + margin: 1em auto; +} + +body.madoko.preview { + padding: 0em 1em; +} + +.madoko p, +.madoko li { + text-align: justify; +} + +/* style headings nicer, especially h5 and h6 */ +.madoko h1, .madoko h2, .madoko h3, .madoko h4 { + margin-top: 1.22em; + margin-bottom: 1ex; +} 
+.madoko h1+p, .madoko h2+p, .madoko h3+p, .madoko h4+p, .madoko h5+p { + margin-top: 1ex; +} +.madoko h5, .madoko h6 { + margin-top: 1ex; + font-size: 1em; +} +.madoko h5 { + margin-bottom: 0.5ex; +} +.madoko h5 + p { + margin-top: 0.5ex; +} +.madoko h6 { + margin-bottom: 0pt; +} +.madoko h6 + p { + margin-top: 0pt; +} + + +/* Fix monospace display (see http://code.stephenmorley.org/html-and-css/fixing-browsers-broken-monospace-font-handling/) */ +.madoko pre, .madoko code, .madoko kbd, .madoko samp, .madoko tt, .madoko .monospace, .madoko .token.indent, .madoko .reveal pre, .madoko .reveal code, .madoko .email { + font-family: Consolas,"Andale Mono WT","Andale Mono",Lucida Console,Monaco,monospace,monospace; + font-size: 0.85em; +} +.madoko pre code, .madoko .token.indent { + font-size: 0.95em; +} + +.madoko pre code { + font-family: inherit !important; +} + +/* Code prettify */ +.madoko ol.linenums li { + background-color: white; + list-style-type: decimal; +} + +/* Merging */ +.madoko .remote { + background-color: #F0FFF0; +} +.madoko .remote + * { + margin-top: 0pt; +} + +/* --------------------------------------------------- + Print settings +----------------------------------------------------*/ + +@media print { + body.madoko, .madoko-body { + font-size: 10pt; + } + @page { + margin: 1in 1.5in; + } +} + +/* --------------------------------------------------- + Mobile device settings +----------------------------------------------------*/ + +@media only screen and (max-device-width:1024px) { + body.madoko, .madoko-body { + padding: 0em 1em; + } +} diff -Nru dafny-1.9.5/Docs/DafnyRef/out/DafnyRef.html dafny-1.9.7/Docs/DafnyRef/out/DafnyRef.html --- dafny-1.9.5/Docs/DafnyRef/out/DafnyRef.html 1970-01-01 00:00:00.000000000 +0000 +++ dafny-1.9.7/Docs/DafnyRef/out/DafnyRef.html 2016-06-05 21:11:14.000000000 +0000 @@ -0,0 +1,8287 @@ + + + + + + + + + Draft Dafny Reference Manual + + + + + + +

+ + + + +
+
+
+
Draft Dafny Reference Manual
+
Manuscript Dafny Reference
+2016-01-27 21:22
+
+
+
Richard L. Ford
+
+
K. Rustan M. Leino
+
+
+

Abstract. This is the Dafny reference manual which describes the Dafny programming +language and how to use the Dafny verification system. +Parts of this manual are more tutorial in nature in order to help the +user understand how to do proofs with Dafny.

+

0. Introduction

+

Dafny [18] is a programming language with built-in specification constructs. +The Dafny static program verifier can be used to verify the functional +correctness of programs. +

+

The Dafny programming language is designed to support the static +verification of programs. It is imperative, sequential, supports generic +classes, methods and functions, dynamic allocation, inductive and +co-inductive datatypes, and specification constructs. The +specifications include pre- and postconditions, frame specifications +(read and write sets), and termination metrics. To further support +specifications, the language also offers updatable ghost variables, +recursive functions, and types like sets and sequences. Specifications +and ghost constructs are used only during verification; the compiler +omits them from the executable code. +

+

The Dafny verifier is run as part of the compiler. As such, a programmer +interacts with it much in the same way as with the static type +checker—when the tool produces errors, the programmer responds by +changing the program’s type declarations, specifications, and statements. +

+

The easiest way to try out Dafny is in your web browser at +rise4fun[15]. Once you get a bit +more serious, you may prefer to download it +to run it on your machine. Although Dafny can be run from the command +line (on Windows or other platforms), the preferred way to run it is in +Microsoft Visual Studio 2012 (or newer) or using emacs, where the Dafny +verifier runs in the background while the programmer is editing the +program. +

+

The Dafny verifier is powered +by Boogie +[0, 16, 23] +and Z3[4]. +

+

From verified programs, the Dafny compiler produces code (.dll or +.exe) for the .NET platform via intermediate C# files. However, the +facilities for interfacing with other .NET code are minimal. +

+

This is the reference manual for the Dafny verification system. It is +based on the following references: +[5, 1315, 18, 20, 22] +

+

The main part of the reference manual is in top down order except for an +initial section that deals with the lowest level constructs. +

0.0. Dafny Example

+

To give a flavor of Dafny, here is the solution to a competition problem. +

+
// VSComp 2010, problem 3, find a 0 in a linked list and return how many
+// nodes were skipped until the first 0 (or end-of-list) was found.
+// Rustan Leino, 18 August 2010.
+// 
+// The difficulty in this problem lies in specifying what the return
+// value 'r' denotes and in proving that the program terminates.  Both of
+// these are addressed by declaring a ghost field 'List' in each
+// linked-list node, abstractly representing the linked-list elements
+// from the node to the end of the linked list.  The specification can
+// now talk about that sequence of elements and can use 'r' as an index
+// into the sequence, and termination can be proved from the fact that
+// all sequences in Dafny are finite.
+// 
+// We only want to deal with linked lists whose 'List' field is properly
+// filled in (which can only happen in an acyclic list, for example).  To
+// that avail, the standard idiom in Dafny is to declare a predicate
+// 'Valid()' that is true of an object when the data structure
+// representing object's abstract value is properly formed.  The
+// definition of 'Valid()' is what one intuitively would think of as the
+// ''object invariant'', and it is mentioned explicitly in method pre-
+// and postconditions.  As part of this standard idiom, one also declared
+// a ghost variable 'Repr' that is maintained as the set of objects that
+// make up the representation of the aggregate object--in this case, the
+// Node itself and all its successors.
+
+class Node {
+  ghost var List: seq<int>
+  ghost var Repr: set<Node>
+  var head: int
+  var next: Node
+
+  predicate Valid()
+    reads this, Repr
+  {
+    this in Repr &&
+    1 <= |List| && List[0] == head &&
+    (next == null ==> |List| == 1) &&
+    (next != null ==>
+      next in Repr && next.Repr <= Repr && this !in next.Repr && 
+      next.Valid() && next.List == List[1..])
+  }
+
+  static method Cons(x: int, tail: Node) returns (n: Node)
+    requires tail == null || tail.Valid()
+    ensures n != null && n.Valid()
+    ensures if tail == null then n.List == [x] 
+                            else n.List == [x] + tail.List
+  {
+    n := new Node;
+    n.head, n.next := x, tail;
+    if (tail == null) {
+      n.List := [x];
+      n.Repr := {n};
+    } else {
+      n.List := [x] + tail.List;
+      n.Repr := {n} + tail.Repr;
+    }
+  }
+}
+
+method Search(ll: Node) returns (r: int)
+  requires ll == null || ll.Valid()
+  ensures ll == null ==> r == 0
+  ensures ll != null ==>
+            0 <= r && r <= |ll.List| &&
+            (r < |ll.List| ==> ll.List[r] == 0 && 0 !in ll.List[..r]) &&
+            (r == |ll.List| ==> 0 !in ll.List)
+{
+  if (ll == null) {
+    r := 0;
+  } else {
+    var jj,i := ll,0;
+    while (jj != null && jj.head != 0)
+      invariant jj != null ==> jj.Valid() && i + |jj.List| == |ll.List| && 
+                               ll.List[i..] == jj.List
+      invariant jj == null ==> i == |ll.List|
+      invariant 0 !in ll.List[..i]
+      decreases |ll.List| - i
+    {
+      jj := jj.next;
+      i := i + 1;
+    }
+    r := i;
+  }
+}
+
+method Main()
+{
+  var list: Node := null;
+  list := list.Cons(0, list);
+  list := list.Cons(5, list);
+  list := list.Cons(0, list);
+  list := list.Cons(8, list);
+  var r := Search(list);
+  print "Search returns ", r, "\n";
+  assert r == 1;
+}

1. Lexical and Low Level Grammar

+

Dafny uses the Coco/R lexer and parser generator for its lexer and parser +(http://www.ssw.uni-linz.ac.at/Research/Projects/Coco)[27]. +The Dafny input file to Coco/R is the Dafny.atg file in the source tree. +A Coco/R input file consists of code written in the target language +(e.g. C#) intermixed with these special sections: +

+
    +
  1. The Characters section which defines classes of characters that are used +in defining the lexer (Section 1.0). +
  2. +
  3. The Tokens section which defines the lexical tokens (Section 1.1). +
  4. +
  5. The Productions section which defines the grammar. The grammar productions +are distributed in the later parts of this document in the parts where +those constructs are explained. +
+ +

The grammar presented in this document was derived from the Dafny.atg +file but has been simplified by removing details that, though needed by +the parser, are not needed to understand the grammar. In particular, the +following transformations have been performed. +

+
    +
  • The semantic actions, enclosed by “(.” and “.)”, were removed. +
  • +
  • There are some elements in the grammar used for error recovery +(“SYNC”). These were removed. +
  • +
  • There are some elements in the grammar for resolving conflicts +(“IF(b)”). These have been removed. +
  • +
  • Some comments related to Coco/R parsing details have been removed. +
  • +
  • A Coco/R grammar is an attributed grammar where the attributes enable +the productions to have input and output parameters. These attributes +were removed except that boolean input parameters that affect +the parsing are kept. + +
      +
    • In our representation we represent these +in a definition by giving the names of the parameters following +the non-terminal name. For example entity1(allowsX). +
    • +
    • In the case of uses of the parameter, the common case is that the +parameter is just passed to a lower-level non-terminal. In that +case we just give the name, e.g. entity2(allowsX). +
    • +
    • If we want to give an explicit value to a parameter, we specify it in +a keyword notation like this: entity2(allowsX: true). +
    • +
    • +

      In some cases the value to be passed depends on the grammatical context. +In such cases we give a description of the conditions under which the +parameter is true, enclosed in parentheses. For example: +

      +

      FunctionSignatureOrEllipsis_(allowGhostKeyword: ("method" present)) +

      means that the allowGhostKeyword parameter is true if the +“method” keyword was given in the associated FunctionDecl. +
    • +
    • Where a parameter affects the parsing of a non-terminal we will +explain the effect of the parameter. +
+ +

The names of character sets and tokens start with a lower case +letter but the names of grammar non-terminals start with +an upper-case letter. +

+

The grammar uses Extended BNF notation. See the Coco/R Reference +manual +for details. But in summary: +

+
    +
  • identifiers starting with a lower case letter denote +terminal symbols, +
  • +
  • identifiers starting with an upper case letter denote nonterminal +symbols. +
  • +
  • Strings denote themselves. +
  • +
  • = separates the sides of a production, e.g. A = a b c +
  • +
  • In the Coco grammars “.” terminates a production, but for readability +in this document a production starts with the defined identifier in +the left margin and may be continued on subsequent lines if they +are indented. +
  • +
  • | separates alternatives, e.g. a b | c | d e means a b or c or d e +
  • +
  • ( ) groups alternatives, e.g. (a | b) c means a c or b c +
  • +
  • [ ] option, e.g. [a] b means a b or b +
  • +
  • { } iteration (0 or more times), e.g. {a} b means b or a b or a a b or … +
  • +
  • We allow | inside [ ] and { }. So [a | b] is short for [(a | b)] +and {a | b} is short for {(a | b)}. +
  • +
  • The first production defines the name of the grammar, in this case Dafny. +
+ +

In addition to the Coco rules, for the sake of readability we have adopted +these additional conventions. +

+
    +
  • We allow - to be used. a - b means it matches if it matches a but not b. +
  • +
  • To aid in explaining the grammar we have added some additional productions +that are not present in the original grammar. We name these with a trailing +underscore. If you inline these where they are referenced, the result should +let you reconstruct the original grammar. +
+ +

For the convenience of the reader, any references to character sets, +tokens, or grammar non-terminals in this document are hyper-links that +will link to the definition of the entity. +

1.0. Character Classes

+

This section defines character classes used later in the token definitions. +In this section backslash is used to start an escape sequence, so for example +‘\n’ denotes the single linefeed character. +

+
letter = "ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz"
+

At present, a letter is an ASCII upper or lowercase letter. Other Unicode letters +are not supported. +

+
digit = "0123456789"
+

A digit is just one of the base-10 digits. +

+
posDigit = "123456789"
+

A posDigit is a digit, excluding 0. +

+
hexdigit = "0123456789ABCDEFabcdef"
+

A hexdigit character is a digit or one of the letters from ‘A’ to ‘F’ in either case. +

+
special = "'_?"
+

The special characters are the characters in addition to alphanumeric characters +that are allowed to appear in a Dafny identifier. These are +

+
    +
  • "'" because mathematicians like to put primes on identifiers and some ML +programmers like to start names of type parameters with a “'”. +
  • +
  • “_” because computer scientists expect to be able to have underscores in identifiers. +
  • +
  • “?” because it is useful to have “?” at the end of names of predicates, +e.g. “Cons?”. +
+ +
cr        = '\r'
+

A carriage return character. +

+
lf        = '\n'
+

A line feed character. +

+
tab       = '\t'
+

A tab character. +

+
space = ' '
+

A space character. +

+
nondigitIdChar = letter + special
+

The characters that can be used in an identifier minus the digits. +

+
idchar = nondigitIdChar + digit
+

The characters that can be used in an identifier. +

+
nonidchar = ANY - idchar
+

Any character except those that can be used in an identifier. +

+
charChar = ANY - '\'' - '\\' - cr - lf
+

Characters that can appear in a character constant. +

+
stringChar = ANY - '"' - '\\' - cr - lf
+

Characters that can appear in a string constant. +

+
verbatimStringChar = ANY - '"'
+

Characters that can appear in a verbatim string. +

1.0.0. Comments

+

Comments are in two forms. +

+
    +
  • They may go from “/*” to “*/” and be nested. +
  • +
  • They may go from “//” to the end of the line. +
+

1.1. Tokens

+

As with most languages, Dafny syntax is defined in two levels. First the stream +of input characters is broken up into tokens. Then these tokens are parsed +using the Dafny grammar. The Dafny tokens are defined in this section. +

1.1.0. Reserved Words

+

The following reserved words appear in the Dafny grammar and may not be used +as identifiers of user-defined entities: +

+
reservedword = 
+    "abstract" | "array" | "as" | "assert" | "assume" | "bool" | "break" |
+    "calc" | "case" | "char" | "class" | "codatatype" | "colemma" |
+    "constructor" | "copredicate" | "datatype" | "decreases" |
+    "default" | "else" | "ensures" | "exists" | "extends" | "false" |
+    "forall" | "free" | "fresh" | "function" | "ghost" | "if" | "imap" | "import" |
+    "in" | "include" | "inductive" | "int" | "invariant" | "iset" | "iterator" | "label" |
+    "lemma" | "map" | "match" | "method" | "modifies" | "modify" |
+    "module" | "multiset" | "nat" | "new" | "newtype" | "null" | "object" |
+    "old" | "opened" | "predicate" | "print" | "protected" |
+    "reads" | "real" | "refines" | "requires" | "return" | "returns" | "seq" |
+    "set" | "static" | "string" | "then" | "this" | "trait" | "true" | "type" |
+    "var" | "where" | "while" | "yield" | "yields" | arrayToken
+
+arrayToken = "array" [ posDigit { digit }]
+

An arrayToken is a reserved word that denotes an array type of +given rank. array is an array type of rank 1 (aka a vector). array2 +is the type of two-dimensional arrays, etc. +

+

TODO: Is “_” a reserved word? +

1.1.1. Identifiers

+
ident = nondigitIdChar { idchar } - arrayToken - charToken - reservedword 
+

In general Dafny identifiers are sequences of idChar characters where +the first character is a nondigitIdChar. However tokens that fit this pattern +are not identifiers if they look like an array type token, a character literal, +or a reserved word. +

1.1.2. Digits

+
digits = digit {['_'] digit}
+

A sequence of decimal digits, possibly interspersed with underscores for readability. Example: 1_234_567. +

+
hexdigits = "0x" hexdigit {['_'] hexdigit}
+

A hexadecimal constant, possibly interspersed with underscores for readability. +Example: 0xffff_ffff. +

+
decimaldigits = digit {['_'] digit} '.' digit {['_'] digit}
+

A decimal fraction constant, possibly interspersed with underscores for readability. +Example: 123_456.789_123. +

1.1.3. Escaped Character

+

In this section the “\” characters are literal. +

+
escapedChar =
+    ( "\'" | "\"" | "\\" | "\0" | "\n" | "\r" | "\t"
+      | "\u" hexdigit hexdigit hexdigit hexdigit
+    )
+

In Dafny character or string literals escaped characters may be used +to specify the presence of the delimiting quote, or back slash, +or null, or new line, or carriage return or tab, or the +Unicode character with given hexadecimal representation. + +

1.1.4. Character Constant Token

+
charToken = "'" ( charChar | escapedChar ) "'" 
+

A character constant is enclosed by “'” and includes either a character +from the charChar set, or an escaped character. Note that although Unicode +letters are not allowed in Dafny identifiers, Dafny does support Unicode +in its character and string constants and in its data. A character +constant has type char. +

1.1.5. String Constant Token

+
stringToken =
+    '"' { stringChar | escapedChar }  '"'
+  | '@' '"' { verbatimStringChar | '"' '"' } '"'
+

A string constant is either a normal string constant or a verbatim string constant. +A normal string constant is enclosed by ‘"’ and can contain characters from the +stringChar set and escapes. +

+

A verbatim string constant is enclosed between ‘@"’ and ‘"’ and can +consists of any characters (including newline characters) except that two +successive double quotes give a way to escape one quote character inside +the string. +

1.2. Low Level Grammar Productions

1.2.0. Identifier Variations

+
Ident = ident 
+

The Ident non-terminal is just an ident token and represents an ordinary +identifier. +

+
DotSuffix = 
+  ( ident | digits | "requires" | "reads" ) 
+

When using the dot notation to denote a component of a compound entity +the token following the “.”, in addition to being an identifier, +can also be a natural number, or one of the keywords requires or reads. +

+
    +
  • Digits can be used to name fields of classes and destructors of +datatypes. For example, the built-in tuple datatypes have destructors +named 0, 1, 2, etc. Note that as a field or destructor name, internal +underscores matter, so 10 is different from 1_0. +
  • +
  • m.requires is used to denote the precondition for method m. +
  • +
  • m.reads is used to denote the things that method m may read. +
+ +
NoUSIdent = ident - "_" { idChar }
+

A NoUSIdent is an identifier except that identifiers with a leading +underscore are not allowed. The names of user-defined entities are +required to be NoUSIdents. We introduce more mnemonic names +for these below (e.g. ClassName). +

+
WildIdent = NoUSIdent | "_" 
+

Identifier, disallowing leading underscores, except the “wildcard” +identifier “_”. When “_” appears it is replaced by a unique generated +identifier distinct from user identifiers. +

1.2.1. NoUSIdent Synonyms

+

In the productions for the declaration of user-defined entities the name of the +user-defined entity is required to be an identifier that does not start +with an underscore, i.e., a NoUSIdent. To make the productions more +mnemonic, we introduce the following synonyms for NoUSIdent. +

+
ModuleName = NoUSIdent 
+ClassName = NoUSIdent 
+TraitName = NoUSIdent 
+DatatypeName = NoUSIdent 
+DatatypeMemberName = NoUSIdent 
+NewtypeName = NoUSIdent 
+NumericTypeName = NoUSIdent 
+SynonymTypeName = NoUSIdent 
+IteratorName = NoUSIdent 
+TypeVariableName = NoUSIdent 
+MethodName = NoUSIdent 
+FunctionName = NoUSIdent 
+PredicateName = NoUSIdent 
+CopredicateName = NoUSIdent 
+LabelName = NoUSIdent
+AttributeName = NoUSIdent
+FieldIdent = NoUSIdent 
+

A FieldIdent is one of the ways to identify a field. The other is +using digits. +

1.2.2. Qualified Names

+

A qualified name starts with the name of the top-level entity and then is followed by +zero or more DotSuffixs which denote a component. Examples: +

+
    +
  • Module.MyType1 +
  • +
  • MyTuple.1 +
  • +
  • MyMethod.requires +
+ +

The grammar does not actually have a production for qualified names +except in the special case of a qualified name that is known to be +a module name, i.e. a QualifiedModuleName. +

1.2.3. Identifier-Type Combinations

+

In this section, we describe some nonterminals that combine an identifier and a type. +

+
IdentType = WildIdent ":" Type
+

In Dafny, a variable or field is typically declared by giving its name followed by +a colon and its type. An IdentType is such a construct. +

+
GIdentType(allowGhostKeyword) = [ "ghost" ] IdentType
+

A GIdentType is a typed entity declaration optionally preceded by “ghost”. The ghost +qualifier means the entity is only used during verification but not in the generated code. +Ghost variables are useful for abstractly representing internal state in specifications. +If allowGhostKeyword is false then “ghost” is not allowed. +

+
LocalIdentTypeOptional = WildIdent [ ":" Type ] 
+

A LocalIdentTypeOptional is used when declaring local variables. In +such a case a value may be specified for the variable in which case the +type may be omitted because it can be inferred from the initial value. +The initial value may also be omitted. +

+
IdentTypeOptional = WildIdent [ ":" Type ] 
+

A IdentTypeOptional is typically used in a context where the type of the identifier +may be inferred from the context. Examples are in pattern matching or quantifiers. +

+
TypeIdentOptional = [ "ghost" ] [ ( NoUSIdent | digits ) ":" ] Type
+

TypeIdentOptionals are used in FormalsOptionalIds. This represents situations +where a type is given but there may not be an identifier. +

+
FormalsOptionalIds = "(" [TypeIdentOptional  { "," TypeIdentOptional } ] ")" 
+

A FormalsOptionalIds is a formal parameter list in which the types are required +but the names of the parameters are optional. This is used in algebraic +datatype definitions. +

1.2.4. Numeric Literals

+
Nat = ( digits | hexdigits ) 
+

A Nat represents a natural number expressed in either decimal or hexadecimal. +

+
Dec = (decimaldigits ) 
+

A Dec represents a decimal fraction literal. +

2. Programs

+
Dafny = { IncludeDirective_ } { TopDecl } EOF 
+

At the top level, a Dafny program (stored as files with extension .dfy) +is a set of declarations. The declarations introduce (module-level) +methods and functions, as well as types (classes, traits, inductive and +co-inductive datatypes, new_types, type synonyms, opaque types, and +iterators) and modules, where the order of introduction is irrelevant. A +class also contains a set of declarations, introducing fields, methods, +and functions. +

+

When asked to compile a program, Dafny looks for the existence of a +Main() method. If a legal Main() method is found, the compiler will emit +a .EXE; otherwise, it will emit a .DLL. +

+

(If there is more than one Main(), Dafny will try to emit an .EXE, but + this may cause the C# compiler to complain. One could imagine improving + this functionality so that Dafny will produce a polite error message in + this case.) +

+

In order to be a legal Main() method, the following must be true: +

+
    +
  • The method takes no parameters +
  • +
  • The method is not a ghost method +
  • +
  • The method has no requires clause +
  • +
  • The method has no modifies clause +
  • +
  • If the method is an instance (that is, non-static) method in a class, +then the enclosing class must not declare any constructor +
+ +

Note, however, that the following are allowed: +

+
    +
  • The method is allowed to be an instance method as long as the enclosing +class does not declare any constructor. In this case, the runtime +system will allocate an object of the enclosing class and will invoke +Main() on it. +
  • +
  • The method is allowed to have ensures clauses +
  • +
  • The method is allowed to have decreases clauses, including a +decreases *. (If Main() has a decreases *, then its execution may +go on forever, but in the absence of a decreases * on Main(), Dafny +will have verified that the entire execution will eventually +terminate.) +
+ +

An invocation of Dafny may specify a number of source files. +Each Dafny file follows the grammar of the Dafny non-terminal. +

+

It consists of a sequence of optional include directives followed by top +level declarations followed by the end of the file. +

2.0. Include Directives

+
IncludeDirective_ = "include" stringToken  
+

Include directives have the form "include" stringToken where +the string token is either a normal string token or a +verbatim string token. The stringToken is interpreted as the name of +a file that will be included in the Dafny source. These included +files also obey the Dafny grammar. Dafny parses and processes the +transitive closure of the original source files and all the included files, +but will not invoke the verifier on these unless they have been listed +explicitly on the command line. +

2.1. Top Level Declarations

+
TopDecl = { { DeclModifier }
+  ( SubModuleDecl
+  | ClassDecl
+  | DatatypeDecl
+  | NewtypeDecl
+  | SynonymTypeDecl
+  | IteratorDecl
+  | TraitDecl
+  | ClassMemberDecl(moduleLevelDecl: true)
+  ) } 
+

Top-level declarations may appear either at the top level of a Dafny file, +or within a SubModuleDecl. A top-level declaration is one of the following +types of declarations which are described later. +

+

The ClassDecl, DatatypeDecl, NewtypeDecl, +SynonymTypeDecl, IteratorDecl, and TraitDecl declarations are +type declarations and are described in Section 5. Ordinarily +ClassMemberDecls appear in class declarations but they can also +appear at the top level. In that case they are included as part of an +implicit top-level class and are implicitly static (but cannot be +declared as static). In addition a ClassMemberDecl that appears at +the top level cannot be a FieldDecl. +

2.2. Declaration Modifiers

+
DeclModifier =
+  ( "abstract" | "ghost" | "static" | "protected"
+  | "extern" [ stringToken]
+  )
+

Top level declarations may be preceded by zero or more declaration +modifiers. Not all of these are allowed in all contexts. +

+

The “abstract” modifiers may only be used for module declarations. +An abstract module can leave some entities underspecified. +Abstract modules are not compiled to C#. +

+

The ghost modifier is used to mark entities as being used for +specification only, not for compilation to code. +

+

The static modifier is used for class members that +are associated with the class as a whole rather than with +an instance of the class. +

+

The protected modifier is used to control the visibility of the +body of functions. +

+

The extern modifier is used to alter the CompileName of +entities. The CompileName is the name for the entity +when translating to Boogie or C#. +

+

The following table shows modifiers that are available +for each of the kinds of declaration. In the table +we use already-ghost to denote that the item is not +allowed to have the ghost modifier because it is already +implicitly ghost. +

+ + + + + + + + + + + + + + + + + + + + + +
Declaration allowed modifiers
module abstract
class extern
trait -
datatype or codatatype -
field ghost
newtype -
synonym types -
iterators -
method ghost static extern
lemma, colemma, comethod already-ghost static protected
inductive lemma already-ghost static
constructor -
function (non-method) already-ghost static protected
function method already-ghost static protected extern
predicate (non-method) already-ghost static protected
predicate method already-ghost static protected extern
inductive predicate already-ghost static protected
copredicate already-ghost static protected

3. Modules

+
SubModuleDecl = ( ModuleDefinition_ | ModuleImport_ ) 
+

Structuring a program by breaking it into parts is an important part of +creating large programs. In Dafny, this is accomplished via modules. +Modules provide a way to group together related types, classes, methods, +functions, and other modules together, as well as control the scope of +declarations. Modules may import each other for code reuse, and it is +possible to abstract over modules to separate an implementation from an +interface. +

3.0. Declaring New Modules

+
ModuleDefinition_ = "module" { Attribute } ModuleName 
+        [ [  "exclusively" ] "refines" QualifiedModuleName ] 
+        "{" { TopDecl } "}" 
+QualifiedModuleName = Ident { "." Ident } 
+

A qualified name that is known to refer to a module. +

+

A new module is declared with the module keyword, followed by the name +of the new module, and a pair of curly braces ({}) enclosing the body +of the module: +

+
module Mod {
+  ...
+}
+

A module body can consist of anything that you could put at the top +level. This includes classes, datatypes, types, methods, functions, etc. +

+
module Mod {
+  class C {
+    var f: int
+    method m() 
+  }
+  datatype Option = A(int) | B(int)
+  type T
+  method m()
+  function f(): int
+}
+

You can also put a module inside another, in a nested fashion: +

+
module Mod {
+  module Helpers {
+    class C {
+      method doIt()
+      var f: int
+    }
+  }
+}
+

Then you can refer to the members of the Helpers module within the +Mod module by prefixing them with “Helpers.”. For example: +

+
module Mod {
+  module Helpers { ... }
+  method m() {
+    var x := new Helpers.C;
+    x.doIt();
+    x.f := 4;
+  }
+}
+

Methods and functions defined at the module level are available like +classes, with just the module name prefixing them. They are also +available in the methods and functions of the classes in the same +module. +

+
module Mod {
+  module Helpers {
+    function method addOne(n: nat): nat {
+      n + 1
+    }
+  }
+  method m() {
+    var x := 5;
+    x := Helpers.addOne(x); // x is now 6
+  }
+}

3.1. Importing Modules

+
ModuleImport_ = "import" ["opened" ] ModuleName
+    [ "=" QualifiedModuleName 
+    | "as" QualifiedModuleName ["default" QualifiedModuleName ] 
+    ]
+    [ ";" ] 
+

Declaring new submodules is useful, but sometimes you want to refer to +things from an existing module, such as a library. In this case, you +can import one module into another. This is done via the import +keyword, and there are a few different forms, each of which has a +different meaning. The simplest kind is the concrete import, and has +the form import A = B. This declaration creates a reference to the +module B (which must already exist), and binds it to the new name +A. Note this new name, i.e. A, is only bound in the module containing +the import declaration; it does not create a global alias. For +example, if Helpers was defined outside of Mod, then we could import +it: +

+
module Helpers {
+  ...
+}
+module Mod {
+  import A = Helpers
+  method m() {
+    assert A.addOne(5) == 6;
+  }
+}
+

Note that inside m(), we have to use A instead of Helpers, as we bound +it to a different name. The name Helpers is not available inside m(), +as only names that have been bound inside Mod are available. In order +to use the members from another module, it either has to be declared +there with module or imported with import. +

+

We don't have to give Helpers a new name, though, if we don't want +to. We can write import Helpers = Helpers if we want to, and Dafny +even provides the shorthand import Helpers for this behavior. You +can't bind two modules with the same name at the same time, so +sometimes you have to use the = version to ensure the names do not +clash. +

+

The QualifiedModuleName in the ModuleImport_ starts with a +sibling module of the importing module, or with a submodule of the +importing module. There is no way to refer to the parent module, only +sibling modules (and their submodules). +

3.2. Opening Modules

+

Sometimes, prefixing the members of the module you imported with the +name is tedious and ugly, even if you select a short name when +importing it. In this case, you can import the module as opened, +which causes all of its members to be available without adding the +module name. The opened keyword must immediately follow import, if it +is present. For example, we could write the previous example as: +

+
module Mod {
+  import opened Helpers
+  method m() {
+    assert addOne(5) == 6;
+  }
+}
+

When opening modules, the newly bound members will have low priority, +so they will be hidden by local definitions. This means if you define +a local function called addOne, the function from Helpers will no +longer be available under that name. When modules are opened, the +original name binding is still present however, so you can always use +the name that was bound to get to anything that is hidden. +

+
module Mod {
+  import opened Helpers
+  function addOne(n: nat): nat {
+    n - 1
+  }
+  method m() {
+    assert addOne(5) == 6; // this is now false,
+                           // as this is the function just defined
+    assert Helpers.addOne(5) == 6; // this is still true
+  }
+}
+

If you open two modules that both declare members with the same name, +then neither member can be referred to without a module prefix, as it +would be ambiguous which one was meant. Just opening the two modules +is not an error, however, as long as you don't attempt to use members +with common names. The opened keyword can be used with any kind of +import declaration, including the module abstraction form. +

3.3. Module Abstraction

+

Sometimes, using a specific implementation is unnecessary; instead, +all that is needed is a module that implements some interface. In +that case, you can use an abstract module import. In Dafny, this is +written import A as B. This means bind the name A as before, but +instead of getting the exact module B, you get any module which +adheres to B. Typically, the module B may have abstract type +definitions, classes with bodyless methods, or otherwise be unsuitable +to use directly. Because of the way refinement is defined, any +refinement of B can be used safely. For example, if we start with: +

+
module Interface {
+  function method addSome(n: nat): nat
+    ensures addSome(n) > n
+}
+module Mod {
+  import A as Interface
+  method m() {
+    assert 6 <= A.addSome(5);
+  }
+}
+

then we can be more precise if we know that addSome actually adds +exactly one. The following module has this behavior. Further, the +postcondition is stronger, so this is actually a refinement of the +Interface module. +

+
module Implementation {
+  function method addSome(n: nat): nat
+    ensures addSome(n) == n + 1
+  {
+    n + 1
+  }
+}
+

We can then substitute Implementation for A in a new module, by +declaring a refinement of Mod which defines A to be Implementation. +

+
module Mod2 refines Mod {
+  import A = Implementation
+  ...
+}
+

You can also give an implementation directly, without introducing a +refinement, by giving a default to the abstract import: +

+
module Interface {
+  function method addSome(n: nat): nat 
+    ensures addSome(n) > n
+}
+module Mod {
+  import A as Interface default Implementation
+  method m() {
+    assert 6 <= A.addSome(5);
+  }
+}
+module Implementation {
+  function method addSome(n: nat): nat 
+    ensures addSome(n) == n + 1
+  {
+    n + 1
+  }
+}
+module Mod2 refines Mod {
+  import A as Interface default Implementation
+  ...
+}
+

Regardless of whether there is a default, the only things known about +A in this example is that it has a function addSome that returns a +strictly bigger result, so even with the default we still can't prove +that A.addSome(5) == 6, only that 6 <= A.addSome(5). +

+

When you refine an abstract import into a concrete one, or give a +default, Dafny checks that the concrete module is a +refinement of the abstract one. This means that the methods must have +compatible signatures, all the classes and datatypes with their +constructors and fields in the abstract one must be present in the +concrete one, the specifications must be compatible, etc. +

3.4. Module Ordering and Dependencies

+

Dafny isn't particular about which order the modules appear in, but +they must follow some rules to be well formed. As a rule of thumb, +there should be a way to order the modules in a program such that each +only refers to things defined before it in the source text. That +doesn't mean the modules have to be given in that order. Dafny will +figure out that order for you, assuming you haven't made any circular +references. For example, this is pretty clearly meaningless: +

+
import A = B
+import B = A
+

You can have import statements at the toplevel, and you can import +modules defined at the same level: +

+
import A = B
+method m() {
+  A.whatever();
+}
+module B { ... }
+

In this case, everything is well defined because we can put B first, +followed by the A import, and then finally m(). If there is no +ordering, then Dafny will give an error, complaining about a cyclic +dependency. +

+

Note that when rearranging modules and imports, they have to be kept +in the same containing module, which disallows some pathological +module structures. Also, the imports and submodules are always +considered to be first, even at the toplevel. This means that the +following is not well formed: +

+
method doIt() { }
+module M {
+  method m() {
+    doIt();
+  }
+}
+

because the module M must come before any other kind of members, such +as methods. To define global functions like this, you can put them in +a module (called Globals, say) and open it into any module that needs +its functionality. Finally, if you import via a path, such as import A = B.C, then this creates a dependency of A on B, as we need to know +what B is (is it abstract or concrete, or a refinement?). +

3.5. Name Resolution

+

When Dafny sees something like A<T>.B<U>.C<V>, how does it know what each part +refers to? The process Dafny uses to determine what identifier +sequences like this refer to is name resolution. Though the rules may +seem complex, usually they do what you would expect. Dafny first looks +up the initial identifier. Depending on what the first identifier +refers to, the rest of the identifier is looked up in the appropriate +context. +

+

In terms of the grammar, sequences like the above are represented as +a NameSegment followed by 0 or more Suffixes. A Suffix is +more general and the form shown above would be for when the +Suffix is an AugmentedDotSuffix_. +

+

The resolution is different depending on whether it is in +an expression context or a type context. +

3.5.0. Expression Context Name Resolution

+

The leading NameSegment is resolved using the first following +rule that succeeds. +

+
    +
  1. +

    Local variables, parameters and bound variables. These are things like +x, y, and i in var x;, ... returns (y: int), and +forall i :: .... The declaration chosen is the match from the +innermost matching scope. +

  2. +
  3. +

    If in a class, try to match a member of the class. If the member that +is found is not static, an implicit this is inserted. This works for +fields, functions, and methods of the current class (if in a static +context, then only static methods and functions are allowed). You can +refer to fields of the current class either as this.f or f, +assuming of course that f hasn't been hidden by one of the above. You +can always prefix this if needed, which cannot be hidden. (Note, a +field whose name is a string of digits must always have some prefix.) +

  4. +
  5. +

    If there is no Suffix, then look for a datatype constructor, if +unambiguous. Any datatypes that don't need qualification (so the +datatype name itself doesn't need a prefix), and also have a uniquely +named constructor, can be referred to just by its name. So if +datatype List = Cons(List) | Nil is the only datatype that declares +Cons and Nil constructors, then you can write Cons(Cons(Nil)). +If the constructor name is not unique, then you need to prefix it with +the name of the datatype (for example List.Cons(List.Nil))). This is +done per constructor, not per datatype. +

  6. +
  7. +

    Look for a member of the enclosing module. +

  8. +
  9. +

    Module-level (static) functions and methods +

+ +

TODO: Not sure about the following paragraph. +Opened modules are treated at each level, after the declarations in the +current module. Opened modules only affect steps 2, 3 and 5. If an +ambiguous name is found, an error is generated, rather than continuing +down the list. After the first identifier, the rules are basically the +same, except in the new context. For example, if the first identifier is +a module, then the next identifier looks into that module. Opened modules +only apply within the module it is opened into. When looking up into +another module, only things explicitly declared in that module are +considered. +

+

To resolve expression E.id: +

+

First resolve expression E and any type arguments. +

+
    +
  • If E resolved to a module M: + +
      +
    1. If E.id<T> is not followed by any further suffixes, look for +unambiguous datatype constructor. +
    2. +
    3. Member of module M: a sub-module (including submodules of imports), +class, datatype, etc. +
    4. +
    5. Static function or method. +
  • +
  • If E denotes a type: + +
      +
    1. Look up id as a member of that type +
  • +
  • If E denotes an expression: + +
      +
    1. Let T be the type of E. Look up id in T. +
+

3.5.1. Type Context Name Resolution

+

In a type context the priority of NameSegment resolution is: +

+
    +
  1. +

    Type parameters. +

  2. +
  3. +

    Member of enclosing module (type name or the name of a module). +

+ +

To resolve expression E.id: +

+
    +
  • If E resolved to a module M: + +
      +
    1. Member of module M: a sub-module (including submodules of imports), +class, datatype, etc. +
  • +
  • If E denotes a type: + +
      +
    1. If allowDanglingDotName: Return the type of E and the given E.id, +letting the caller try to make sense of the final dot-name. +TODO: I don't understand this sentence. What is allowDanglingDotName? +
+

4. Specifications

+

Specifications describe logical properties of Dafny methods, functions, +lambdas, iterators and loops. They specify preconditions, postconditions, +invariants, what memory locations may be read or modified, and +termination information by means of specification clauses. +For each kind of specification zero or more specification +clauses (of the type accepted for that type of specification) +may be given, in any order. +

+

We document specifications at these levels: +

+
    +
  • At the lowest level are the various kinds of specification clauses, +e.g. a RequiresClause_. +
  • +
  • Next are the specifications for entities that need them, +e.g. a MethodSpec. +
  • +
  • At the top level are the entity declarations that include +the specifications, e.g. MethodDecl. +
+ +

This section documents the first two of these in a bottom-up manner. +We first document the clauses and then the specifications +that use them. +

4.0. Specification Clauses

4.0.0. Requires Clause

+
RequiresClause_ = 
+    "requires" Expression(allowLemma: false, allowLambda: false)
+

The requires clauses specify preconditions for methods, +functions, lambda expressions and iterators. Dafny checks +that the preconditions are met at all call sites. The +callee may then assume the preconditions hold on entry. +

+

If no requires clause is specified it is taken to be true. +

+

If more than one requires clause is given, then the +precondition is the conjunction of all of the expressions +from all of the requires clauses. +

4.0.1. Ensures Clause

+
EnsuresClause_ = 
+    "ensures" { Attribute } Expression(allowLemma: false, allowLambda: false)
+ForAllEnsuresClause_ = 
+    "ensures" Expression(allowLemma: false, allowLambda: true)
+FunctionEnsuresClause_ = 
+    "ensures" Expression(allowLemma: false, allowLambda: false)
+

An ensures clause specifies the post condition for a +method, function or iterator. +

+

If no ensures clause is specified it is taken to be true. +

+

If more than one ensures clause is given, then the +postcondition is the conjunction of all of the expressions +from all of the ensures clauses. +

+

TODO: In the present sources FunctionEnsuresClause_ differs from +EnsuresClause_ only in that it is not allowed to specify +Attributes. This seems like a bug and will likely +be fixed in a future version. +

4.0.2. Decreases Clause

+
DecreasesClause_(allowWildcard, allowLambda) = 
+    "decreases" { Attribute } DecreasesList(allowWildcard, allowLambda)
+FunctionDecreasesClause_(allowWildcard, allowLambda) =
+    "decreases" DecreasesList(allowWildcard, allowLambda)
+
DecreasesList(allowWildcard, allowLambda) = 
+  PossiblyWildExpression(allowLambda) 
+  { "," PossiblyWildExpression(allowLambda) } 
+

If allowWildcard is false but one of the +PossiblyWildExpressions is a wild-card, an error is +reported. +

+

TODO: A FunctionDecreasesClause_ is not allowed to specify +Attributes. This will be fixed in a future version. +

+

Decreases clauses are used to prove termination in the +presence of recursion. If more than one decreases clause is given, +it is as if a single decreases clause had been given with the +collected list of arguments. That is, +

+
decreases A, B
+decreases C, D
+

is equivalent to +

+
decreases A, B, C, D
+

If any of the expressions in the decreases clause are wild (i.e. “*”) +then proof of termination will be skipped. +

+

Termination metrics in Dafny, which are declared by decreases clauses, +are lexicographic tuples of expressions. At each recursive (or mutually +recursive) call to a function or method, Dafny checks that the effective +decreases clause of the callee is strictly smaller than the effective +decreases clause of the caller. +

+

What does “strictly smaller” mean? Dafny provides a built-in + well-founded order for every type and, in some cases, between types. For + example, the Boolean “false” is strictly smaller than “true”, the + integer 78 is strictly smaller than 102, the set {2,5} is strictly + smaller than the set {2,3,5}, and for “s” of type seq<Color> where + Color is some inductive datatype, the color s[0] is strictly less than + s (provided s is nonempty). +

+

What does “effective decreases clause” mean? Dafny always appends a +“top” element to the lexicographic tuple given by the user. This top +element cannot be syntactically denoted in a Dafny program and it never +occurs as a run-time value either. Rather, it is a fictitious value, +which here we will denote \top, such that each value that can ever occur +in a Dafny program is strictly less than \top. Dafny sometimes also +prepends expressions to the lexicographic tuple given by the user. The +effective decreases clause is any such prefix, followed by the +user-provided decreases clause, followed by \top. We said “user-provided +decreases clause”, but if the user completely omits a “decreases” clause, +then Dafny will usually make a guess at one, in which case the effective +decreases clause is any prefix followed by the guess followed by \top. +(If you're using the Dafny IDE in Visual Studio, you can hover the mouse +over the name of a recursive function or method, or the “while” keyword +for a loop, to see the “decreases” clause that Dafny guessed, if any.) +

+

Here is a simple but interesting example: the Fibonacci function. +

+
function Fib(n: nat) : nat
+{
+  if n < 2 then n else Fib(n-2) + Fib(n-1)
+}
+
+

In this example, if you hover your mouse over the function name +you will see that Dafny has supplied a decreases n clause. +

+

Let's take a look at the kind of example where a mysterious-looking +decreases clause like “Rank, 0” is useful. +

+

Consider two mutually recursive methods, A and B: +

+
method A(x: nat)
+{
+  B(x);
+}
+
+method B(x: nat)
+{
+  if x != 0 { A(x-1); }
+}
+

To prove termination of A and B, Dafny needs to have effective +decreases clauses for A and B such that: +

+
    +
  • +

    the measure for the callee B(x) is strictly smaller than the measure +for the caller A(x), and +

  • +
  • +

    the measure for the callee A(x-1) is strictly smaller than the measure +for the caller B(x). +

+ +

Satisfying the second of these conditions is easy, but what about the +first? Note, for example, that declaring both A and B with “decreases x” +does not work, because that won't prove a strict decrease for the call +from A(x) to B(x). +

+

Here's one possibility (for brevity, we will omit the method bodies): +

+
method A(x: nat)
+  decreases x, 1
+
+method B(x: nat)
+  decreases x, 0
+

For the call from A(x) to B(x), the lexicographic tuple "x, 0" is +strictly smaller than "x, 1", and for the call from B(x) to A(x-1), the +lexicographic tuple "x-1, 1" is strictly smaller than "x, 0". +

+

Two things to note: First, the choice of “0” and “1” as the second + components of these lexicographic tuples is rather arbitrary. It could + just as well have been “false” and “true”, respectively, or the sets + {2,5} and {2,3,5}. Second, the keyword decreases often gives rise to + an intuitive English reading of the declaration. For example, you might + say that the recursive calls in the definition of the familiar Fibonacci + function Fib(n) “decreases n”. But when the lexicographic tuple contains + constants, the English reading of the declaration becomes mysterious and + may give rise to questions like “how can you decrease the constant 0?”. + The keyword is just that—a keyword. It says “here comes a list of + expressions that make up the lexicographic tuple we want to use for the + termination measure”. What is important is that one effective decreases + clause is compared against another one, and it certainly makes sense to + compare something to a constant (and to compare one constant to + another). +

+

We can simplify things a little bit by remembering that Dafny appends + \top to the user-supplied decreases clause. For the A-and-B example, + this lets us drop the constant from the decreases clause of A: +

+
 method A(x: nat)
+   decreases x
+
+method B(x: nat)
+  decreases x, 0
+

The effective decreases clause of A is "x, \top" and the effective +decreases clause of B is "x, 0, \top". These tuples still satisfy the two +conditions (x, 0, \top) < (x, \top) and (x-1, \top) < (x, 0, \top). And +as before, the constant “0” is arbitrary; anything less than \top (which +is any Dafny expression) would work. +

+

Let's take a look at one more example that better illustrates the utility +of \top. Consider again two mutually recursive methods, call them Outer +and Inner, representing the recursive counterparts of what iteratively +might be two nested loops: +

+
method Outer(x: nat)
+{
+  // set y to an arbitrary non-negative integer
+  var y :| 0 <= y;
+  Inner(x, y);
+}
+
+method Inner(x: nat, y: nat)
+{
+  if y != 0 {
+    Inner(x, y-1);
+  } else if x != 0 {
+    Outer(x-1);
+  }
+}
+

The body of Outer uses an assign-such-that statement to represent some +computation that takes place before Inner is called. It sets “y” to some +arbitrary non-negative value. In a more concrete example, Inner would do +some work for each “y” and then continue as Outer on the next smaller +“x”. +

+

Using a decreases clause "x, y" for Inner seems natural, but if +we don't have any bound on the size of the "y" computed by Outer, +there is no expression we can write in decreases clause of Outer +that is sure to lead to a strictly smaller value for "y" when Inner +is called. \top to the rescue. If we arrange for the effective +decreases clause of Outer to be "x, \top" and the effective decreases +clause for Inner to be "x, y, \top", then we can show the strict +decreases as required. Since \top is implicitly appended, the two +decreases clauses declared in the program text can be: +

+
method Outer(x: nat)
+  decreases x
+
+method Inner(x: nat, y: nat)
+  decreases x, y
+

Moreover, remember that if a function or method has no user-declared +decreases clause, Dafny will make a guess. The guess is (usually) +the list of arguments of the function/method, in the order given. This is +exactly the decreases clauses needed here. Thus, Dafny successfully +verifies the program without any explicit decreases clauses: +

+
method Outer(x: nat)
+{
+  var y :| 0 <= y;
+  Inner(x, y);
+}
+
+method Inner(x: nat, y: nat)
+{
+  if y != 0 {
+    Inner(x, y-1);
+  } else if x != 0 {
+    Outer(x-1);
+  }
+}
+

The ingredients are simple, but the end result may seem like magic. For many users, however, there may be no magic at all – the end result may be so natural that the user never even has to bother to think about the fact that there was a need to prove termination in the first place. +

4.0.3. Framing

+
FrameExpression(allowLemma, allowLambda) = 
+  ( Expression(allowLemma, allowLambda) [ FrameField ] 
+  | FrameField )
+
FrameField = "`" Ident 
+
PossiblyWildFrameExpression(allowLemma) = 
+    ( "*" | FrameExpression(allowLemma, allowLambda: false) )
+

Frame expressions are used to denote the set of memory locations +that a Dafny program element may read or write. A frame +expression is a set expression. The form {} (that is, the empty set) +says that no memory locations may be modified, +which is also the default if no modifies clause is given explicitly. +

+

Note that framing only applies to the heap, or memory accessed through +references. Local variables are not stored on the heap, so they cannot be +mentioned (well, they are not in scope in the declaration) in reads +annotations. Note also that types like sets, sequences, and multisets are +value types, and are treated like integers or local variables. Arrays and +objects are reference types, and they are stored on the heap (though as +always there is a subtle distinction between the reference itself and the +value it points to.) +

+

The FrameField construct is used to specify a field of a +class object. The identifier following the back-quote is the +name of the field being referenced. +If the FrameField is preceded by an expression the expression +must be a reference to an object having that field. +If the FrameField is not preceded by an expression then +the frame expression is referring to that field of the current +object. This form is only used from a method of a class. +

+

The use of FrameField is discouraged as in practice it has not +been shown to either be more concise or to perform better. +Also, there's (unfortunately) no form of it for array +elements—one could imagine +

+
  modifies a`[j]
+

Also, FrameField is not taken into consideration for +lambda expressions. +

4.0.4. Reads Clause

+
FunctionReadsClause_ = 
+  "reads" 
+  PossiblyWildFrameExpression (allowLemma: false)
+  { "," PossiblyWildFrameExpression(allowLemma: false) }
+LambdaReadsClause_ = 
+  "reads" PossiblyWildFrameExpression(allowLemma: true)
+IteratorReadsClause_ = 
+  "reads" { Attribute } 
+  FrameExpression(allowLemma: false, allowLambda: false) 
+  { "," FrameExpression(allowLemma: false, allowLambda: false) }
+PossiblyWildExpression(allowLambda) = 
+    ( "*" | Expression(allowLemma: false, allowLambda) ) 
+

Functions are not allowed to have side effects but may be restricted in +what they can read. The reading frame of a function (or predicate) is all +the memory locations that the function is allowed to read. The reason we +might limit what a function can read is so that when we write to memory, +we can be sure that functions that did not read that part of memory have +the same value they did before. For example, we might have two arrays, +one of which we know is sorted. If we did not put a reads annotation on +the sorted predicate, then when we modify the unsorted array, we cannot +determine whether the other array stopped being sorted. While we might be +able to give invariants to preserve it in this case, it gets even more +complex when manipulating data structures. In this case, framing is +essential to making the verification process feasible. +

+

It is not just the body of a function that is subject to reads +checks, but also its precondition and the reads clause itself. +

+

A reads clause can list a wildcard (“*”), which allows the enclosing +function to read anything. In many cases, and in particular in all cases +where the function is defined recursively, this makes it next to +impossible to make any use of the function. Nevertheless, as an +experimental feature, the language allows it (and it is sound). +Note that a “*” makes the rest of the frame expression irrelevant. +

+

A reads clause specifies the set of memory locations that a function, +lambda, or iterator may read. If more than one reads clause is given +in a specification the effective read set is the union of the sets +specified. If there are no reads clauses the effective read set is +empty. If "*" is given in a reads clause it means any memory may be +read. +

+

TODO: It would be nice if the different forms of read clauses could be +combined. In a future version the single form of read clause will allow +a list and attributes. +

4.0.5. Modifies Clause

+
ModifiesClause_ = 
+  "modifies" { Attribute } 
+  FrameExpression(allowLemma: false, allowLambda: false) 
+  { "," FrameExpression(allowLemma: false, allowLambda: false) }
+

Frames also affect methods. As you might have guessed, methods are not +required to list the things they read. Methods are allowed to read +whatever memory they like, but they are required to list which parts of +memory they modify, with a modifies annotation. They are almost identical +to their reads cousins, except they say what can be changed, rather than +what the value of the function depends on. In combination with reads, +modification restrictions allow Dafny to prove properties of code that +would otherwise be very difficult or impossible. Reads and modifies are +one of the tools that allow Dafny to work on one method at a time, +because they restrict what would otherwise be arbitrary modifications of +memory to something that Dafny can reason about. +

+

Note that fields of newly allocated objects can always be modified. +

+

It is also possible to frame what can be modified by a block statement +by means of the block form of the +modify statement (Section 21.16). +

+

A modifies clause specifies the set of memory locations that a +method, iterator or loop body may modify. If more than one modifies +clause is given in a specification, the effective modifies set is the +union of the sets specified. If no modifies clause is given the +effective modifies set is empty. A loop can also have a +modifies clause. If none is given, the loop gets to modify anything +the enclosing context is allowed to modify. +

4.0.6. Invariant Clause

+
InvariantClause_ = 
+    "invariant" { Attribute } 
+    Expression(allowLemma: false, allowLambda: true)
+

An invariant clause is used to specify an invariant +for a loop. If more than one invariant clause is given for +a loop the effective invariant is the conjunction of +the conditions specified. +

+

The invariant must hold on entry to the loop. And assuming it +is valid on entry, Dafny must be able to prove that it then +holds at the end of the loop. +

4.1. Method Specification

+
MethodSpec = 
+  { ModifiesClause_
+  | RequiresClause_
+  | EnsuresClause_
+  | DecreasesClause_(allowWildcard: true, allowLambda: false)
+  } 
+

A method specification is zero or more modifies, requires, +ensures or decreases clauses, in any order. +A method does not have reads clauses because methods are allowed to +read any memory. +

4.2. Function Specification

+
FunctionSpec =
+  { RequiresClause_
+  | FunctionReadsClause_
+  | FunctionEnsuresClause_
+  | FunctionDecreasesClause_(allowWildcard: false, allowLambda: false)
+  } 
+

A function specification is zero or more reads, requires, +ensures or decreases clauses, in any order. A function +specification does not have modifies clauses because functions are not +allowed to modify any memory. +

4.3. Lambda Specification

+
LambdaSpec_ = 
+  { LambdaReadsClause_
+  | RequiresClause_ 
+  } 
+

A lambda specification is zero or more reads or requires clauses. +Lambda specifications do not have ensures clauses because the body +is never opaque. +Lambda specifications do not have decreases +clauses because they do not have names and thus cannot be recursive. A +lambda specification does not have modifies clauses because lambdas +are not allowed to modify any memory. +

4.4. Iterator Specification

+
IteratorSpec =
+  { IteratorReadsClause_
+  | ModifiesClause_
+  | [ "yield" ] RequiresClause_
+  | [ "yield" ] EnsuresClause_
+  | DecreasesClause_(allowWildcard: false, allowLambda: false)
+  } 
+

An iterator specification applies both to the iterator's constructor +method and to its MoveNext method. The reads and modifies +clauses apply to both of them. For the requires and ensures +clauses, if yield is not present they apply to the constructor, +but if yield is present they apply to the MoveNext method. +

+

TODO: What is the meaning of a decreases clause on an iterator? +Does it apply to MoveNext? Make sure our description of +iterators explains these. +

+

TODO: What is the relationship between the post condition and +the Valid() predicate? + +

4.5. Loop Specification

+
LoopSpec =
+  { InvariantClause_
+  | DecreasesClause_(allowWildcard: true, allowLambda: true)
+  | ModifiesClause_ 
+  } 
+

A loop specification provides the information Dafny needs to +prove properties of a loop. The InvariantClause_ clause +is effectively a precondition and it along with the +negation of the loop test condition provides the postcondition. +The DecreasesClause_ clause is used to prove termination. +

5. Types

+
Type = DomainType [ "->" Type ] 
+

A Dafny type is a domain type (i.e. a type that can be the domain of a +function type) optionally followed by an arrow and a range type. +

+
DomainType =
+  ( BoolType_ | CharType_ | NatType_ | IntType_ | RealType_ | ObjectType_
+  | FiniteSetType_ | InfiniteSetType_ | MultisetType_ 
+  | SequenceType_ | StringType_
+  | FiniteMapType_ | InfiniteMapType_ | ArrayType_
+  | TupleType_ | NamedType_ ) 
+

The domain types comprise the builtin scalar types, the builtin +collection types, tuple types (including as a special case +a parenthesized type) and reference types. +

+

Dafny types may be categorized as either value types or reference types. +

5.0. Value Types

+

The value types are those whose values do not lie in the program heap. +These are: +

+
    +
  • The basic scalar types: bool, char, nat, int, real +
  • +
  • The built-in collection types: set, multiset, seq, string, map, imap +
  • +
  • Tuple Types +
  • +
  • Inductive and co-inductive types +
+ +

Data items having value types are passed by value. Since they are not +considered to occupy memory, framing expressions do not reference them. +

5.1. Reference Types

+

Dafny offers a host of reference types. These represent +references to objects allocated dynamically in the program heap. To +access the members of an object, a reference to (that is, a pointer +to or object identity of) the object is dereferenced. +

+

The reference types are class types, traits and array types. +

+

The special value null is part of every reference +type.0 +

5.2. Named Types

+
NamedType_ = NameSegmentForTypeName { "." NameSegmentForTypeName }
+

A NamedType_ is used to specify a user-defined type by name +(possibly module-qualified). Named types are introduced by +class, trait, inductive, co-inductive, synonym and opaque +type declarations. They are also used to refer to type variables. +

+
NameSegmentForTypeName = Ident  [ GenericInstantiation ] 
+

A NameSegmentForTypeName is a type name optionally followed by a +GenericInstantiation which supplies type parameters to a generic +type, if needed. It is a special case of a NameSegment +(See Section 22.35) +that does not allow a HashCall. +

+

The following sections describe each of these kinds of types in more detail. +

6. Basic types

+

Dafny offers these basic types: bool for booleans, char for +characters, int and nat for integers, and real for reals. +

6.0. Booleans

+
BoolType_ = "bool" 
+

There are two boolean values and each has a corresponding literal in +the language: false and true. +

+

In addition to equality (==) and disequality (!=), which are +defined on all types, type bool supports the following operations: +

+ + + + + + + + + + + + +
operator description
<==> equivalence (if and only if)
==> implication (implies)
<== reverse implication (follows from)
&& conjunction (and)
|| disjunction (or)
! negation (not)
+

Negation is unary; the others are binary. The table shows the operators +in groups of increasing binding power, with equality binding stronger +than conjunction and disjunction, and weaker than negation. Within +each group, different operators do not associate, so parentheses need +to be used. For example, +

+
A && B || C    // error
+

would be ambiguous and instead has to be written as either +

+
(A && B) || C
+

or +

+
A && (B || C)
+

depending on the intended meaning. +

6.0.0. Equivalence Operator

+

The expressions A <==> B and A == B give the same value, but note +that <==> is associative whereas == is chaining. So, +

+
A <==> B <==> C
+

is the same as +

+
A <==> (B <==> C)
+

and +

+
(A <==> B) <==> C
+

whereas +

+
A == B == C
+

is simply a shorthand for +

+
A == B && B == C

6.0.1. Conjunction and Disjunction

+

Conjunction is associative and so is disjunction. These operators are +short circuiting (from left to right), meaning that their second +argument is evaluated only if the evaluation of the first operand does +not determine the value of the expression. Logically speaking, the +expression A && B is defined when A is defined and either A +evaluates to false or B is defined. When A && B is defined, its +meaning is the same as the ordinary, symmetric mathematical +conjunction ∧. The same holds for || and ∨. +

6.0.2. Implication and Reverse Implication

+

Implication is right associative and is short-circuiting from left +to right. Reverse implication B <== A is exactly the same as +A ==> B, but gives the ability to write the operands in the opposite +order. Consequently, reverse implication is left associative and is +short-circuiting from right to left. To illustrate the +associativity rules, each of the following four lines expresses the +same property, for any A, B, and C of type bool: +

+
A ==> B ==> C
+A ==> (B ==> C)  // parentheses redundant, since ==> is right associative
+C <== B <== A
+(C <== B) <== A  // parentheses redundant, since <== is left associative
+

To illustrate the short-circuiting rules, note that the expression +a.Length is defined for an array a only if a is not null (see +Section 5.1), which means the following two +expressions are well-formed: +

+
a != null ==> 0 <= a.Length
+0 <= a.Length <== a != null
+

The contrapositive of these two expressions would be: +

+
a.Length < 0 ==> a == null  // not well-formed
+a == null <== a.Length < 0  // not well-formed
+

but these expressions are not well-formed, since well-formedness +requires the left (and right, respectively) operand, a.Length < 0, +to be well-formed by itself. +

+

Implication A ==> B is equivalent to the disjunction !A || B, but +is sometimes (especially in specifications) clearer to read. Since, +|| is short-circuiting from left to right, note that +

+
a == null || 0 <= a.Length
+

is well-formed, whereas +

+
0 <= a.Length || a == null  // not well-formed
+

is not. +

+

In addition, booleans support logical quantifiers (forall and +exists), described in section 22.30. +

6.1. Numeric types

+
IntType_ = "int" 
+RealType_ = "real" 
+

Dafny supports numeric types of two kinds, integer-based, which +includes the basic type int of all integers, and real-based, which +includes the basic type real of all real numbers. User-defined +numeric types based on int and real, called newtypes, are +described in Section 19. Also, the subset type +nat, representing the non-negative subrange of int, is described +in Section 20. +

+

The language includes a literal for each non-negative integer, like +0, 13, and 1985. Integers can also be written in hexadecimal +using the prefix “0x”, as in 0x0, 0xD, and 0x7c1 (always with +a lower case x, but the hexadecimal digits themselves are case +insensitive). Leading zeros are allowed. To form negative integers, +use the unary minus operator. +

+

There are also literals for some of the non-negative reals. These are +written as a decimal point with a nonempty sequence of decimal digits +on both sides. For example, 1.0, 1609.344, and 0.5772156649. +

+

For integers (in both decimal and hexadecimal form) and reals, +any two digits in a literal may be separated by an underscore in order +to improve human readability of the literals. For example: +

+
1_000_000        // easier to read than 1000000
+0_12_345_6789    // strange but legal formatting of 123456789
+0x8000_0000      // same as 0x80000000 -- hex digits are often placed in groups of 4
+0.000_000_000_1  // same as 0.0000000001 -- 1 Ångström
+

In addition to equality and disequality, numeric types +support the following relational operations: +

+ + + + + + + +
operator description
< less than
<= at most
>= at least
> greater than
+

Like equality and disequality, these operators are chaining, as long +as they are chained in the “same direction”. That is, +

+
A <= B < C == D <= E
+

is simply a shorthand for +

+
A <= B && B < C && C == D && D <= E
+

whereas +

+
A < B > C
+

is not allowed. +

+

There are also operators on each numeric type: +

+ + + + + + + + + + + +
operator description
+ addition (plus)
- subtraction (minus)
* multiplication (times)
/ division (divided by)
% modulus (mod)
- negation (unary minus)
+

The binary operators are left associative, and they associate with +each other in the two groups. The groups are listed in order of +increasing binding power, with equality binding less strongly than +either group and the unary operator binding most strongly. +Modulus is supported only for integer-based numeric types. Integer +division and modulus are the Euclidean division and modulus. This +means that modulus always returns a non-negative value, regardless of the +signs of the two operands. More precisely, for any integer a and +non-zero integer b, +

+
a == a / b * b + a % b
+0 <= a % b < B
+

where B denotes the absolute value of b. +

+

Real-based numeric types have a member Trunc that returns the +floor of the real value, that is, the largest integer not exceeding +the real value. For example, the following properties hold, for any +r and r' of type real: +

+
3.14.Trunc == 3
+(-2.5).Trunc == -3
+-2.5.Trunc == -2
+real(r.Trunc) <= r
+r <= r' ==> r.Trunc <= r'.Trunc
+

Note in the third line that member access (like .Trunc) binds +stronger than unary minus. The fourth line uses the conversion +function real from int to real, as described in Section +19.0. +

6.2. Characters

+
CharType_ = "char" 
+

Dafny supports a type char of characters. Character literals are +enclosed in single quotes, as in 'D'. Their form is described +by the charToken nonterminal in the grammar. To write a single quote as a +character literal, it is necessary to use an escape sequence. +Escape sequences can also be used to write other characters. The +supported escape sequences are as follows: +

+ + + + + + + + + + + +
escape sequence meaning
\' the character '
\" the character "
\\ the character \
\0 the null character, same as \u0000
\n line feed
\r carriage return
\t horizontal tab
\uxxxx universal character whose hexadecimal code is xxxx
+

The escape sequence for a double quote is redundant, because +'"' and '\"' denote the same +character—both forms are provided in order to support the same +escape sequences as for string literals (Section 9.2.4). +In the form \uxxxx, the u is always lower case, but the four +hexadecimal digits are case insensitive. +

+

Character values are ordered and can be compared using the standard +relational operators: +

+ + + + + + + +
operator description
< less than
<= at most
>= at least
> greater than
+

Sequences of characters represent strings, as described in Section +9.2.4. +

+

The only other operations on characters are obtaining a character +by indexing into a string, and the implicit conversion to string +when used as a parameter of a print statement. +

+

TODO: Are there any conversions between char values and numeric values? +

7. Type parameters

+
GenericParameters = "<" TypeVariableName [ "(" "==" ")" ]
+      { "," TypeVariableName [ "(" "==" ")" ] } ">" 
+

Many of the types (as well as functions and methods) in Dafny can be +parameterized by types. These type parameters are typically +declared inside angle brackets and can stand for any type. +

+

It is sometimes necessary to restrict these type parameters so that +they can only be instantiated by certain families of types. As such, +Dafny distinguishes types that support the equality operation +not only in ghost contexts but also in compiled contexts. To indicate +that a type parameter is restricted to such equality supporting +types, the name of the type parameter takes the suffix +“(==)”.1 For example, +

+
method Compare<T(==)>(a: T, b: T) returns (eq: bool)
+{
+  if a == b { eq := true; } else { eq := false; }
+}
+

is a method whose type parameter is restricted to equality-supporting +types. Again, note that all types support equality in ghost +contexts; the difference is only for non-ghost (that is, compiled) +code. Co-inductive datatypes, function types, as well as inductive +datatypes with ghost parameters are examples of types that are not +equality supporting. +

+

Dafny has some inference support that makes certain signatures less +cluttered (described in a different part of the Dafny language +reference). In some cases, this support will +infer that a type parameter must be restricted to equality-supporting +types, in which case Dafny adds the “(==)” automatically. +

+

TODO: Need to describe type inference somewhere. +

8. Generic Instantiation

+
GenericInstantiation = "<" Type { "," Type } ">" 
+

When a generic entity is used, actual types must be specified for each +generic parameter. This is done using a GenericInstantiation. +If the GenericInstantiation is omitted, type inference will try +to fill these in. +

9. Collection types

+

Dafny offers several built-in collection types. +

9.0. Sets

+
FiniteSetType_ = "set" [ GenericInstantiation ]
+InfiniteSetType_ = "iset" [ GenericInstantiation ]
+

For any type T, each value of type set<T> is a finite set of +T values. +

+

TODO: +Set membership is determined by equality in the type T, +so set<T> can be used in a non-ghost context only if T is equality +supporting. +

+

For any type T, each value of type iset<T> is a potentially infinite +set of T values. +

+

A set can be formed using a set display expression, which is a +possibly empty, unordered, duplicate-insensitive list of expressions +enclosed in curly braces. To illustrate, +

+
{}        {2, 7, 5, 3}        {4+2, 1+5, a*b}
+

are three examples of set displays. There is also a set comprehension +expression (with a binder, like in logical quantifications), described in +section 22.31. +

+

In addition to equality and disequality, set types +support the following relational operations: +

+ + + + + + + +
operator description
< proper subset
<= subset
>= superset
> proper superset
+

Like the arithmetic relational operators, these operators are +chaining. +

+

Sets support the following binary operators, listed in order of +increasing binding power: +

+ + + + + + + + + +
operator description
!! disjointness
+ set union
- set difference
* set intersection
+

The associativity rules of +, -, and * are like those of the +arithmetic operators with the same names. The expression A !! B, +whose binding power is the same as equality (but which neither +associates nor chains with equality), says that sets A and B have +no elements in common, that is, it is equivalent to +

+
A * B == {}
+

However, the disjointness operator is chaining, so A !! B !! C !! D +means: +

+
A * B == {} && (A + B) * C == {} && (A + B + C) * D == {}
+

In addition, for any set s of type set<T> or iset<T> and any +expression e of type T, sets support the following operations: +

+ + + + + + +
expression description
|s| set cardinality
e in s set membership
e !in s set non-membership
+

The expression e !in s is a syntactic shorthand for !(e in s). +

9.1. Multisets

+
MultisetType_ = "multiset" [ GenericInstantiation ] 
+

A multiset is similar to a set, but keeps track of the multiplicity +of each element, not just its presence or absence. For any type T, +each value of type multiset<T> is a map from T values to natural +numbers denoting each element's multiplicity. Multisets in Dafny +are finite, that is, they contain a finite number of each of a finite +set of elements. Stated differently, a multiset maps only a finite +number of elements to non-zero (finite) multiplicities. +

+

Like sets, multiset membership is determined by equality in the type +T, so multiset<T> can be used in a non-ghost context only if T +is equality supporting. +

+

A multiset can be formed using a multiset display expression, which +is a possibly empty, unordered list of expressions enclosed in curly +braces after the keyword multiset. To illustrate, +

+
multiset{}    multiset{0, 1, 1, 2, 3, 5}    multiset{4+2, 1+5, a*b}
+

are three examples of multiset displays. There is no multiset +comprehension expression. +

+

In addition to equality and disequality, multiset types +support the following relational operations: +

+ + + + + + + +
operator description
< proper multiset subset
<= multiset subset
>= multiset superset
> proper multiset superset
+

Like the arithmetic relational operators, these operators are +chaining. +

+

Multisets support the following binary operators, listed in order of +increasing binding power: +

+ + + + + + + + + +
operator description
!! multiset disjointness
+ multiset union
- multiset difference
* multiset intersection
+

The associativity rules of +, -, and * are like those of the +arithmetic operators with the same names. The + operator +adds the multiplicity of corresponding elements, the - operator +subtracts them (but 0 is the minimum multiplicity), +and the * operator takes the minimum of the +multiplicities of the operands. +

+

The expression A !! B +says that multisets A and B have no elements in common, that is, +it is equivalent to +

+
A * B == multiset{}
+

Like the analogous set operator, !! is chaining. +

+

In addition, for any multiset s of type multiset<T>, +expression e of type T, and non-negative integer-based numeric +n, multisets support the following operations: +

+ + + + + + + + +
expression description
|s| multiset cardinality
e in s multiset membership
e !in s multiset non-membership
s[e] multiplicity of e in s
s[e := n] multiset update (change of multiplicity)
+

The expression e in s returns true if and only if s[e] != 0. +The expression e !in s is a syntactic shorthand for !(e in s). +The expression s[e := n] denotes a multiset like +s, but where the multiplicity of element e is n. Note that +the multiset update s[e := 0] results in a multiset like s but +without any occurrences of e (whether or not s has occurrences of +e in the first place). As another example, note that +s - multiset{e} is equivalent to: +

+
if e in s then s[e := s[e] - 1] else s

9.2. Sequences

+
SequenceType_ = "seq" [ GenericInstantiation ] 
+

For any type T, a value of type seq<T> denotes a sequence of T +elements, that is, a mapping from a finite downward-closed set of natural +numbers (called indices) to T values. (Thinking of it as a map, +a sequence is therefore something of a dual of a multiset.) +

9.2.0. Sequence Displays

+

A sequence can be formed using a sequence display expression, which +is a possibly empty, ordered list of expressions enclosed in square +brackets. To illustrate, +

+
[]        [3, 1, 4, 1, 5, 9, 3]        [4+2, 1+5, a*b]
+

are three examples of sequence displays. There is no sequence +comprehension expression. +

9.2.1. Sequence Relational Operators

+

In addition to equality and disequality, sequence types +support the following relational operations: +

+ + + + + +
operator description
< proper prefix
<= prefix
+

Like the arithmetic relational operators, these operators are +chaining. Note the absence of > and >=. +

9.2.2. Sequence Concatenation

+

Sequences support the following binary operator: +

+ + + + +
operator description
+ concatenation
+

Operator + is associative, like the arithmetic operator with the +same name. +

9.2.3. Other Sequence Expressions

+

In addition, for any sequence s of type seq<T>, expression e +of type T, integer-based numeric i satisfying 0 <= i < |s|, and +integer-based numerics lo and hi satisfying +0 <= lo <= hi <= |s|, sequences support the following operations: +

+ + + + + + + + + + + + + +
expression description
|s| sequence length
s[i] sequence selection
s[i := e] sequence update
e in s sequence membership
e !in s sequence non-membership
s[lo..hi] subsequence
s[lo..] drop
s[..hi] take
s[slices] slice
multiset(s) sequence conversion to a multiset<T>
+

Expression s[i := e] returns a sequence like s, except that the +element at index i is e. The expression e in s says there +exists an index i such that s[i] == e. It is allowed in non-ghost +contexts only if the element type T is equality supporting. +The expression e !in s is a syntactic shorthand for !(e in s). +

+

Expression s[lo..hi] yields a sequence formed by taking the first +hi elements and then dropping the first lo elements. The +resulting sequence thus has length hi - lo. Note that s[0..|s|] +equals s. If the upper bound is omitted, it +defaults to |s|, so s[lo..] yields the sequence formed by dropping +the first lo elements of s. If the lower bound is omitted, it +defaults to 0, so s[..hi] yields the sequence formed by taking the +first hi elements of s. +

+

In the sequence slice operation, slices is a nonempty list of +length designators separated and optionally terminated by a colon, and +there is at least one colon. Each length designator is a non-negative +integer-based numeric; the sum of the length designators must be no greater than |s|. If there +are k colons, the operation produces k + 1 consecutive subsequences +from s, each of the length indicated by the corresponding length +designator, and returns these as a sequence of +sequences.2 If slices is terminated by a +colon, then the length of the last slice extends until the end of s, +that is, its length is |s| minus the sum of the given length +designators. For example, the following equalities hold, for any +sequence s of length at least 10: +

+
var t := [3.14, 2.7, 1.41, 1985.44, 100.0, 37.2][1:0:3];
+assert |t| == 3 && t[0] == [3.14] && t[1] == [];
+assert t[2] == [2.7, 1.41, 1985.44];
+var u := [true, false, false, true][1:1:];
+assert |u| == 3 && u[0][0] && !u[1][0] && u[2] == [false, true];
+assert s[10:][0] == s[..10];
+assert s[10:][1] == s[10..];
+

The operation multiset(s) yields the multiset of elements of +sequence s. It is allowed in non-ghost contexts only if the element +type T is equality supporting. +

9.2.4. Strings

+
StringType_ = "string" 
+

A special case of a sequence type is seq<char>, for which Dafny +provides a synonym: string. Strings are like other sequences, but +provide additional syntax for sequence display expressions, namely +string literals. There are two forms of the syntax for string +literals: the standard form and the verbatim form. +

+

String literals of the standard form are enclosed in double quotes, as +in "Dafny". To include a double quote in such a string literal, +it is necessary to use an escape sequence. Escape sequences can also +be used to include other characters. The supported escape sequences +are the same as those for character literals, see Section 6.2. +For example, the Dafny expression "say \"yes\"" represents the +string 'say "yes"'. +The escape sequence for a single quote is redundant, because +"'" and "\'" denote the same +string—both forms are provided in order to support the same +escape sequences as for character literals. +

+

String literals of the verbatim form are bracketed by +@" and ", as in @"Dafny". To include +a double quote in such a string literal, it is necessary to use the +escape sequence "", that is, to write the character +twice. In the verbatim form, there are no other escape sequences. +Even characters like newline can be written inside the string literal +(hence spanning more than one line in the program text). +

+

For example, the following three expressions denote the same string: +

+
"C:\\tmp.txt"
+@"C:\tmp.txt"
+['C', ':', '\\', 't', 'm', 'p', '.', 't', 'x', 't']
+

Since strings are sequences, the relational operators < +and <= are defined on them. Note, however, that these operators +still denote proper prefix and prefix, respectively, not some kind of +alphabetic comparison as might be desirable, for example, when +sorting strings. +

9.3. Finite and Infinite Maps

+
FiniteMapType_ = "map" [ GenericInstantiation ] 
+InfiniteMapType_ = "imap" [ GenericInstantiation ] 
+

For any types T and U, a value of type map<T,U> denotes a +(finite) map +from T to U. In other words, it is a look-up table indexed by +T. The domain of the map is a finite set of T values that have +associated U values. Since the keys in the domain are compared +using equality in the type T, type map<T,U> can be used in a +non-ghost context only if T is equality supporting. +

+

Similarly, for any types T and U, a value of type imap<T,U> +denotes a (possibly) infinite map. In most regards, imap<T,U> is +like map<T,U>, but a map of type imap<T,U> is allowed to have an +infinite domain. +

+

A map can be formed using a map display expression (see MapDisplayExpr), +which is a possibly empty, ordered list of maplets, each maplet having the +form t := u where t is an expression of type T and u is an +expression of type U, enclosed in square brackets after the keyword +map. To illustrate, +

+
map[]    map[20 := true, 3 := false, 20 := false]    map[a+b := c+d]
+

are three examples of map displays. By using the keyword imap +instead of map, the map produced will be of type imap<T,U> +instead of map<T,U>. Note that an infinite map (imap) is allowed +to have a finite domain, whereas a finite map (map) is not allowed +to have an infinite domain. +If the same key occurs more than +once, only the last occurrence appears in the resulting +map.3 There is also a map comprehension expression, +explained in section 22.34. +

+

For any map fm of type map<T,U>, +any map m of type map<T,U> or imap<T,U>, +any expression t of type T, +any expression u of type U, and any d in the domain of m (that +is, satisfying d in m), maps support the following operations: +

+ + + + + + + + +
expression description
|fm| map cardinality
m[d] map selection
m[t := u] map update
t in m map domain membership
t !in m map domain non-membership
+

|fm| denotes the number of mappings in fm, that is, the +cardinality of the domain of fm. Note that the cardinality operator +is not supported for infinite maps. +Expression m[d] returns the U value that m associates with d. +Expression m[t := u] is a map like m, except that the +element at key t is u. The expression t in m says t is in the +domain of m and t !in m is a syntactic shorthand for +!(t in m).4 +

+

Here is a small example, where a map cache of type map<int,real> +is used to cache computed values of Joule-Thomson coefficients for +some fixed gas at a given temperature: +

+
if K in cache {  // check if temperature is in domain of cache
+  coeff := cache[K];  // read result in cache
+} else {
+  coeff := ComputeJouleThomsonCoefficient(K);  // do expensive computation
+  cache := cache[K := coeff];  // update the cache
+}

10. Types that stand for other types

+
SynonymTypeDecl = 
+  ( SynonymTypeDefinition_ | OpaqueTypeDefinition_ ) [ ";" ] 
+

It is sometimes useful to know a type by several names or to treat a +type abstractly. Synonym and opaque types serve this purpose. +

10.0. Type synonyms

+
SynonymTypeDefinition_ =
+  "type" { Attribute } SynonymTypeName [ GenericParameters ] "=" Type
+

A type synonym declaration: +

+
type Y<T> = G
+

declares Y<T> to be a synonym for the type G. Here, T is a +nonempty list of type parameters (each of which is optionally +designated with the suffix “(==)”), which can be used as free type +variables in G. If the synonym has no type parameters, the “<T>” +is dropped. In all cases, a type synonym is just a synonym. That is, +there is never a difference, other than possibly in error messages +produced, between Y<T> and G. +

+

For example, the names of the following type synonyms may improve the +readability of a program: +

+
type Replacements<T> = map<T,T>
+type Vertex = int
+

As already described in Section 9.2.4, string is a built-in +type synonym for seq<char>, as if it would have been declared as +follows: +

+
type string = seq<char>

10.1. Opaque types

+
OpaqueTypeDefinition_ = "type" { Attribute } SynonymTypeName 
+  [ "(" "==" ")" ] [ GenericParameters ] 
+

A special case of a type synonym is one that is underspecified. Such +a type is declared simply by: +

+
type Y<T>
+

It is known as an opaque type. Its definition can be revealed in a +refining module. To indicate that Y designates an +equality-supporting type, “(==)” can be written immediately +following the name “Y”. +

+

For example, the declarations +

+
type T
+function F(t: T): T
+

can be used to model an uninterpreted function F on some +arbitrary type T. As another example, +

+
type Monad<T>
+

can be used abstractly to represent an arbitrary parameterized monad. +

11. Well-founded Functions and Extreme Predicates

+

This section is a tutorial on well-founded functions and extreme predicates. +We place it here in preparation for Section 12 +where function and predicate definitions are described. +

+

Recursive functions are a core part of computer science and mathematics. +Roughly speaking, when the definition of such a function spells out a +terminating computation from given arguments, we may refer to +it as a well-founded function. For example, the common factorial and +Fibonacci functions are well-founded functions. +

+

There are also other ways to define functions. An important case +regards the definition of a boolean function as an extreme solution +(that is, a least or greatest solution) to some equation. For +computer scientists with interests in logic or programming languages, +these extreme predicates are important because they describe the +judgments that can be justified by a given set of inference rules +(see, e.g., [3, 24, 28, 31, 36]). +

+

To benefit from machine-assisted reasoning, it is necessary not just +to understand extreme predicates but also to have techniques for +proving theorems about them. A foundation for this reasoning was +developed by Paulin-Mohring [29] and is the +basis of the constructive logic supported by Coq [1] as well +as other proof assistants [2, 34]. Essentially, the idea is to represent the +knowledge that an extreme predicate holds by the proof term by which +this knowledge was derived. For a predicate defined as the least +solution, such proof terms are values of an inductive datatype (that +is, finite proof trees), and for the greatest solution, a coinductive +datatype (that is, possibly infinite proof trees). This means that +one can use induction and coinduction when reasoning about these proof +trees. Therefore, these extreme predicates are known as, +respectively, inductive predicates and coinductive predicates (or, +co-predicates for short). Support for extreme predicates is also +available in the proof assistants Isabelle [30] and HOL +[6]. +

+

Dafny supports both well-founded functions and extreme predicates. +This section is a tutorial that describes the difference in general +terms, and then describes novel syntactic support in Dafny for +defining and proving lemmas with extreme predicates. Although Dafny's +verifier has at its core a first-order SMT solver, Dafny's logical +encoding makes it possible to reason about fixpoints in an automated +way. +

+

The encoding for coinductive predicates in Dafny was described previously +[21] and is here described in Section +18.2. +

11.0. Function Definitions

+

To define a function $f \colon X \to Y$ in terms of itself, one can +write an equation like +

+
(0) + +
\[ f \Equal \F(f) +\]
+

where $\mathcal{F}$ is a non-recursive function of type +$(X \to Y) \to X \to Y$. Because it takes a function as an argument, +$\mathcal{F}$ is referred to as a functor (or functional, but not to be +confused with the category-theory notion of a functor). +Throughout, I will assume that $\F(f)$ by itself is well defined, +for example that it does not divide by zero. I will also assume that $f$ occurs +only in fully applied calls in $\F(f)$; eta expansion can be applied to +ensure this. If $f$ is a boolean function, that is, if $Y$ is +the type of booleans, then I call $f$ a predicate. +

+

For example, the common Fibonacci function over the +natural numbers can be defined by the equation +

+
(1) + +
\[ \fib \Equal + \lambda n \bullet\; \ite{n < 2}{n}{\fib(n-2) + \fib(n-1)} +\]
+

With the understanding that the argument $n$ is universally +quantified, we can write this equation equivalently as +

+
(2) + +
\[ \fib(n) \Equal + \ite{n < 2}{n}{\fib(n-2) + \fib(n-1)} +\]
+

The fact that the function being defined occurs on both sides of the equation +causes concern that we might not be defining the function properly, leading to a +logical inconsistency. In general, there +could be many solutions to an equation like (0) or there could be none. +Let's consider two ways to make sure we're defining the function uniquely. +

11.0.0. Well-founded Functions

+

A standard way to ensure that equation (0) has a unique solution in $f$ is +to make sure the recursion is well-founded, which roughly means that the +recursion terminates. This is done by introducing any well-founded +relation $\Less$ on the domain of $f$ and making sure that the argument to each recursive +call goes down in this ordering. More precisely, if we formulate (0) as +

+
(3) + +
\[ f(x) \Equal \F'(f) +\]
+

then we want to check $E \Less x$ for each call $f(E)$ in $\F'(f)$. When a function +definition satisfies this decrement condition, then the function is said to be +well-founded. +

+

For example, to check the decrement condition for $\fib$ in (2), we can pick +$\Less$ to be the arithmetic less-than relation on natural numbers and check the +following, for any $n$: +

+
(4) + +
\[ 2 \leq n \;\;\Imp\;\; n-2 \Less n \;\And\; n-1 \Less n +\]
+

Note that we are entitled to using the antecedent $2 \leq n$, because that is the +condition under which the else branch in (2) is evaluated. +

+

A well-founded function is often thought of as “terminating” in the sense +that the recursive depth in evaluating $f$ +on any given argument is finite. That is, there are no infinite descending chains +of recursive calls. However, the evaluation of $f$ on a given argument +may fail to terminate, because its width may be infinite. For example, let $P$ +be some predicate defined on the ordinals and let $\PDownward$ be a predicate on the +ordinals defined by the following equation: +

+
(5) + +
\[ \PDownward(o) \Equal + P(o) \And \forall p \bullet\; p \Less o \Imp \PDownward(p) +\]
+

With $\Less$ as the usual ordering on ordinals, this equation satisfies the decrement +condition, but evaluating $\PDownward(\omega)$ would require evaluating +$\PDownward(n)$ for every natural number $n$. However, what we are concerned +about here is to avoid mathematical inconsistencies, and that is +indeed a consequence of the decrement condition. +

11.0.0.0. Example with Well-founded Functions
+

So that we can later see how inductive proofs are done in Dafny, let's prove that +for any $n$, $\fib(n)$ is even iff $n$ is a multiple of $3$. +We split our task into +two cases. If $n < 2$, then the property follows directly from the definition +of $\fib$. Otherwise, note that exactly one of the three numbers $n-2$, $n-1$, and $n$ +is a multiple of 3. If $n$ is the multiple of 3, then by invoking the +induction hypothesis on $n-2$ +and $n-1$, we obtain that $\fib(n-2) + \fib(n-1)$ is the sum of two odd numbers, +which is even. If $n-2$ or $n-1$ is a multiple of 3, then by invoking the induction +hypothesis on $n-2$ and $n-1$, we obtain that $\fib(n-2) + \fib(n-1)$ is the sum of an +even number and an odd number, which is odd. In this proof, we invoked the induction +hypothesis on $n-2$ and on $n-1$. This is allowed, because both are smaller than +$n$, and hence the invocations go down in the well-founded ordering on natural numbers. +

11.0.1. Extreme Solutions

+

We don't need to exclude the possibility of equation (0) having multiple +solutions—instead, we can just be clear about which one of them we want. +Let's explore this, after a smidgen of lattice theory. +

+

For any complete lattice $(Y,\leq)$ and any set $X$, we can by pointwise extension define +a complete lattice $(X \to Y, \FBelow)$, where for any $f,g \colon X \to Y$, +

+
(6) + +
\[ f \FBelow g \Equiv + \forall x \bullet\; f(x) \leq g(x) +\]
+

In particular, if $Y$ is the set of booleans ordered by implication ($\false \leq \true$), +then the set of predicates over any domain $X$ forms a complete lattice. +Tarski's Theorem [35] tells us that any monotonic function over a +complete lattice has a least and a greatest fixpoint. In particular, this means that +$\F$ has a least fixpoint and a greatest fixpoint, provided $\F$ is monotonic. +

+

Speaking about the set of solutions in $f$ to (0) is the same as speaking +about the set of fixpoints of functor $\F$. In particular, the least and greatest +solutions to (0) are the same as the least and greatest fixpoints of $\F$. +In casual speak, it happens that we say “fixpoint of (0)”, or more +grotesquely, “fixpoint of $f$” when we really mean “fixpoint of $\F$”. +

+

In conclusion of our little excursion into lattice theory, we have that, under the +proviso of $\F$ being monotonic, the set of solutions in $f$ to (0) is nonempty, +and among these solutions, there is in the $\FBelow$ ordering a least solution (that is, +a function that returns $\false$ more often than any other) and a greatest solution (that +is, a function that returns $\true$ more often than any other). +

+

When discussing extreme solutions, I will now restrict my attention to boolean functions +(that is, with $Y$ being the type of booleans). Functor $\F$ is monotonic +if the calls to $f$ in $\F'(f)$ are in positive positions (that is, under an even number +of negations). Indeed, from now on, I will restrict my attention to such monotonic +functors $\F$. +

+

Let me introduce a running example. Consider the following equation, +where $x$ ranges over the integers: +

+
(7) + +
\[ g(x) \Equal (x = 0 \Or g(x-2)) +\]
+

This equation has four solutions in $g$. With $w$ ranging over the integers, they are: +

+
(8) + +
\[ \begin{array}{r@{}l} + g(x) \Equiv{}& x \in \{w \;|\; 0 \leq w \And w\textrm{ even}\} \\ + g(x) \Equiv{}& x \in \{w \;|\; w\textrm{ even}\} \\ + g(x) \Equiv{}& x \in \{w \;|\; (0 \leq w \And w\textrm{ even}) \Or w\textrm{ odd}\} \\ + g(x) \Equiv{}& x \in \{w \;|\; \true\} + \end{array} +\]
+

The first of these is the least solution and the last is the greatest solution. +

+

In the literature, the definition of an extreme predicate is often given as a set of +inference rules. To designate the least solution, a single line separating the +antecedent (on top) from conclusion (on bottom) is used: +

+
(9) + +
\[ \frac{}{g(0)} + \qquad\qquad + \frac{g(x-2)}{g(x)} +\]
+

Through repeated applications of such rules, one can show that the predicate holds for +a particular value. For example, the derivation, or proof tree, +to the left in Figure 0 shows that $g(6)$ holds. +(In this simple example, the derivation is a rather degenerate proof “tree”.) +The use of these inference rules gives rise to a least solution, because proof trees are +accepted only if they are finite. +

+
+
+
+
\[\dfrac{ + \dfrac{ + \dfrac{ + \dfrac{}{g(0)\xstrut} + }{g(2)\xstrut} + }{g(4)\xstrut} + }{g(6)\xupstrut} +\]
+
+
\[\Dfrac{ + \Dfrac{ + \Dfrac{ + \Dfrac{ + {}_{\vdots } + }{{g(-5)}} + }{{g(-3)}} + }{{g(-1)}} + }{g(1)} +\]
+
+ +
Figure 0. Left: a finite proof tree that uses the rules of (9) to establish $g(6)$. Right: an infinite proof tree that uses the rules of (10) to establish $g(1)$.
+

When inference rules are to designate the greatest solution, a double +line is used: +

+
(10) + +
\[ \Dfrac{}{g(0)} + \qquad\qquad + \Dfrac{g(x-2)}{g(x)} +\]
+

In this case, proof trees are allowed to be infinite. For example, the (partial depiction +of the) infinite proof tree on the right in Figure 0 shows that $g(1)$ holds. +

+

Note that derivations may not be unique. For example, in the case of the greatest +solution for $g$, there are two proof trees that establish $g(0)$: one is the finite +proof tree that uses the left-hand rule of (10) once, the other is the infinite +proof tree that keeps on using the right-hand rule of (10). +

11.0.2. Working with Extreme Predicates

+

In general, one cannot evaluate whether or not an extreme predicate holds for some +input, because doing so may take an infinite number of steps. For example, following +the recursive calls in the definition (7) to try to evaluate $g(7)$ would never +terminate. However, there are useful ways to establish that an extreme predicate holds +and there are ways to make use of one once it has been established. +

+

For any $\F$ as in (0), I define two infinite series of well-founded +functions, $\iter{f}_k$ and $\Iter{f}_k$ +where $k$ ranges over the natural numbers: +

+
(11) + +
\[ \iter{f}_k(x) \Equal \left\{ + \begin{array}{ll} + \false & \textrm{if } k = 0 \\ + \F(\iter{f}_{k-1})(x) & \textrm{if } k > 0 + \end{array} + \right. +\]
+
(12) + +
\[ \Iter{f}_k(x) \Equal \left\{ + \begin{array}{ll} + \true & \textrm{if } k = 0 \\ + \F(\Iter{f}_{k-1})(x) & \textrm{if } k > 0 + \end{array} + \right. +\]
+

These functions are called the iterates of $f$, and I will also refer to them +as the prefix predicates of $f$ (or the prefix predicate of $f$, if we think +of $k$ as being a parameter). +Alternatively, we can define $\iter{f}_k$ and $\Iter{f}_k$ without mentioning $x$: +Let $\bot$ denote the function that always returns $\false$, let $\top$ +denote the function that always returns $\true$, and let a superscript on $\F$ denote +exponentiation (for example, $\F^0(f) = f$ and $\F^2(f) = \F(\F(f))$). +Then, (11) and (12) can be stated equivalently as +$\iter{f}_k = \F^k(\bot)$ and $\Iter{f}_k = \F^k(\top)$. +

+

For any solution $f$ to equation (0), we have, for any $k$ and $\ell$ +such that $k \leq \ell$: +

+
(13) + +
\[ \iter{f}_k \quad\FBelow\quad + \iter{f}_\ell \quad\FBelow\quad + f \quad\FBelow\quad + \Iter{f}_\ell \quad\FBelow\quad + \Iter{f}_k +\]
+

In other words, every $\iter{f}_k$ is a pre-fixpoint of $f$ and every $\Iter{f}_k$ is a post-fixpoint +of $f$. Next, I define two functions, $f\least$ and $f\greatest$, in +terms of the prefix predicates: +

+
(14) + +
\[ f\least(x) \Equal \exists k \bullet\; \iter{f}_k(x) +\]
+
(15) + +
\[ f\greatest(x) \Equal \forall k \bullet\; \Iter{f}_k(x) +\]
+

By (13), we also have that $f\least$ is a pre-fixpoint of $\F$ and $f\greatest$ +is a post-fixpoint of $\F$. The marvelous thing is that, if $\F$ is continuous, then +$f\least$ and $f\greatest$ are the least and greatest fixpoints of $\F$. +These equations let us do proofs by induction when dealing with extreme predicates. +I will explain in Section 11.1.2 how to check for continuity. +

+

Let's consider two examples, both involving function $g$ in +(7). As it turns out, $g$'s defining functor is continuous, +and therefore I will write $g\least$ and $g\greatest$ to denote the +least and greatest solutions for $g$ in (7). +

11.0.2.0. Example with Least Solution
+

The main technique for establishing that $g\least(x)$ holds for some +$x$, that is, proving something of the form $Q \Imp g\least(x)$, is to +construct a proof tree like the one for $g(6)$ in Figure +0. For a proof in this direction, since we're just +applying the defining equation, the fact that +we're using a least solution for $g$ never plays a role (as long as we +limit ourselves to finite derivations). +

+

The technique for going in the other direction, proving something from an established +$g\least$ property, that is, showing something of the form $g\least(x) \Imp R$, typically +uses induction on the structure of the proof tree. When the antecedent of our proof +obligation includes a predicate term $g\least(x)$, it is sound to +imagine that we have been given a proof tree for $g\least(x)$. Such a proof tree +would be a data structure—to be more precise, a term in an +inductive datatype. +For this reason, least solutions like $g\least$ have been given the +name inductive predicate. +

+

Let's prove $g\least(x) \Imp 0 \leq x \And x \textrm{ even}$. +We split our task into two cases, corresponding to which of the two +proof rules in (9) was the +last one applied to establish $g\least(x)$. If it was the left-hand rule, then $x=0$, +which makes it easy to establish the conclusion of our proof goal. If it was the +right-hand rule, then we unfold the proof tree one level and obtain $g\least(x-2)$. +Since the proof tree for $g\least(x-2)$ is smaller than where we started, we invoke +the induction hypothesis and obtain $0 \leq (x-2) \And (x-2) \textrm{ even}$, from which +it is easy to establish the conclusion of our proof goal. +

+

Here's how we do the proof formally using (14). We massage the +general form of our proof goal: +

+ + + + +
$f\least(x) \Imp R$
=      { (14) }
$(\exists k \bullet\; \iter{f}_k(x)) \Imp R$
=      { distribute $\Imp$ over $\exists$ to the left }
$\forall k \bullet\; (\iter{f}_k(x) \Imp R)$
+

The last line can be proved by induction over $k$. So, in our case, we prove +$\iter{g}_k(x) \Imp 0 \leq x \And x \textrm{ even}$ for every $k$. +If $k=0$, then $\iter{g}_k(x)$ is $\false$, so our goal holds trivially. +If $k > 0$, then $\iter{g}_k(x) = (x = 0 \Or \iter{g}_{k-1}(x-2))$. Our goal holds easily +for the first disjunct ($x=0$). For the other disjunct, +we apply the induction hypothesis (on the smaller $k-1$ and with $x-2$) and +obtain $0 \leq (x-2) \And (x-2) \textrm{ even}$, from which our proof goal +follows. +

11.0.2.1. Example with Greatest Solution
+

We can think of a given predicate $g\greatest(x)$ as being represented +by a proof tree—in this case a term in a coinductive datatype, +since the proof may be infinite. +For this reason, greatest solutions like $g\greatest$ have +been given the name coinductive predicate, or co-predicate for short. +The main technique for proving something from a given proof tree, that +is, to prove something of the form $g\greatest(x) \Imp R$, is to +destruct the proof. Since this is just unfolding the defining +equation, the fact that we're using a greatest solution for $g$ never +plays a role (as long as we limit ourselves to a finite number of +unfoldings). +

+

To go in the other direction, to establish a predicate defined as a greatest solution, +like $Q \Imp g\greatest(x)$, we may need an infinite number of steps. For this purpose, +we can use induction's dual, coinduction. Were it not for one little detail, coinduction +is as simple as continuations in programming: the next part of the proof obligation +is delegated to the coinduction hypothesis. The little detail is making sure that +it is the “next” part we're passing on for the continuation, not the same part. This +detail is called productivity and corresponds to the requirement in +induction of making sure we're going down a well-founded relation when +applying the induction hypothesis. There are +many sources with more information, see for example the classic account by +Jacobs and Rutten [8] +or a new attempt by Kozen and Silva +that aims to emphasize the simplicity, not the mystery, of +coinduction [11]. +

+

Let's prove $\true \Imp g\greatest(x)$. The intuitive coinductive proof goes like this: +According to the right-hand rule of (10), $g\greatest(x)$ follows if we +establish $g\greatest(x-2)$, and that's easy to do by invoking the coinduction hypothesis. +The “little detail”, productivity, is satisfied in this proof because we applied +a rule in (10) before invoking the coinduction hypothesis. +

+

For anyone who may have felt that the intuitive proof was too easy, here is a formal +proof using (15), which relies only on induction. We massage the +general form of our proof goal: +

+ + + + +
$Q \Imp f\greatest(x)$
=      { (15) }
$Q \Imp \forall k \bullet\; \Iter{f}_k(x)$
=      { distribute $\Imp$ over $\forall$ to the right }
$\forall k \bullet\; Q \Imp \Iter{f}_k(x)$
+

The last line can be proved by induction over $k$. So, in our case, we prove +$\true \Imp \Iter{g}_k(x)$ for every $k$. +If $k=0$, then $\Iter{g}_k(x)$ is $\true$, so our goal holds trivially. +If $k > 0$, then $\Iter{g}_k(x) = (x = 0 \Or \Iter{g}_{k-1}(x-2))$. We establish the second +disjunct by applying the induction hypothesis (on the smaller $k-1$ and with $x-2$). +

11.0.3. Other Techniques

+

Although in this paper I consider only well-founded functions and extreme +predicates, it is worth mentioning that there are additional ways of making sure that +the set of solutions to (0) is nonempty. For example, if all calls to $f$ in +$\F'(f)$ are tail-recursive calls, then (under the assumption that $Y$ is nonempty) the set of +solutions is nonempty. To see this, consider an attempted evaluation of $f(x)$ that fails +to determine a definite result value because of an infinite chain of calls that applies $f$ +to each value of some subset $X'$ of $X$. Then, apparently, the value of $f$ for any one +of the values in $X'$ is not determined by the equation, but picking any particular result +values for these makes for a consistent definition. +This was pointed out by Manolios and Moore [25]. +Functions can be underspecified in this way in the proof assistants ACL2 [10] +and HOL [12]. +

11.1. Functions in Dafny

+

In this section, I explain with examples the support in +Dafny5 for well-founded functions, extreme predicates, +and proofs regarding these. +

11.1.0. Well-founded Functions in Dafny

+

Declarations of well-founded functions are unsurprising. For example, the Fibonacci +function is declared as follows: +

+
function fib(n: nat): nat
+{
+  if n < 2 then n else fib(n-2) + fib(n-1)
+}
+

Dafny verifies that the body (given as an expression in curly braces) is well defined. +This includes decrement checks for recursive (and mutually recursive) calls. Dafny +predefines a well-founded relation on each type and extends it to lexicographic tuples +of any (fixed) length. For example, the well-founded relation $x \Less y$ for integers +is $x < y \And 0 \leq y$, the one for reals is $x \leq y - 1.0 \And 0.0 \leq y$ +(this is the same ordering as for integers, if you read the integer +relation as $x \leq y - 1 \And 0 \leq y$), the one for inductive +datatypes is structural inclusion, +and the one for coinductive datatypes is $\false$. +

+

Using a decreases clause, the programmer can specify the term in this predefined +order. When a function definition omits a decreases clause, Dafny makes a simple +guess. This guess (which can be inspected by hovering over the function name in the +Dafny IDE) is very often correct, so users are rarely bothered to provide explicit +decreases clauses. +

+

If a function returns bool, one can drop the result type : bool and change the +keyword function to predicate. +

11.1.1. Proofs in Dafny

+

Dafny has lemma declarations. These are really just special cases of methods: +they can have pre- and postcondition specifications and their body is a code block. +Here is the lemma we stated and proved in Section 11.0.0.0: +

+
lemma FibProperty(n: nat)
+  ensures fib(n) % 2 == 0 <==> n % 3 == 0
+{
+  if n < 2 {
+  } else {
+    FibProperty(n-2); FibProperty(n-1);
+  }
+}
+

The postcondition of this lemma (keyword ensures) gives the proof +goal. As in any program-correctness logic (e.g., +[7]), the postcondition must +be established on every control path through the lemma's body. For +FibProperty, I give the proof by +an if statement, hence introducing a case split. The then branch is empty, because +Dafny can prove the postcondition automatically in this case. The else branch +performs two recursive calls to the lemma. These are the invocations of the induction +hypothesis and they follow the usual program-correctness rules, +namely: the precondition must hold at the call site, the call must terminate, and then +the caller gets to assume the postcondition upon return. The “proof glue” needed +to complete the proof is done automatically by Dafny. +

+

Dafny features an aggregate statement using which it is possible to make (possibly +infinitely) many calls at once. For example, the induction hypothesis can be called +at once on all values n' smaller than n: +

+
forall n' | 0 <= n' < n {
+  FibProperty(n');
+}
+

For our purposes, this corresponds to strong induction. More +generally, the forall statement has the form +

+
forall k | P(k)
+  ensures Q(k)
+{ Statements; }
+

Logically, this statement corresponds to universal introduction: the body proves that +Q(k) holds for an arbitrary k such that P(k), and the conclusion of the forall statement +is then $\forall k \bullet\; P(k) \Imp Q(k)$. When the body of the forall statement is +a single call (or calc statement), the ensures clause is inferred and can be omitted, +like in our FibProperty example. +

+

Lemma FibProperty is simple enough that its whole body can be replaced by the one +forall statement above. In fact, Dafny goes one step further: it automatically +inserts such a forall statement at the beginning of every lemma [19]. +Thus, FibProperty can be declared and proved simply by: +

+
lemma FibProperty(n: nat)
+  ensures fib(n) % 2 == 0 <==> n % 3 == 0
+{ }
+

Going in the other direction from universal introduction is existential elimination, +also known as Skolemization. Dafny has a statement for this, too: +for any variable x and boolean expression Q, the +assign such that statement x :| Q; says to assign to x a value such that Q +will hold. A proof obligation when using this statement is to show that there +exists an x such that Q holds. For example, if the fact +$\exists k \bullet\; 100 \leq \fib(k) < 200$ is known, then the statement +k :| 100 <= fib(k) < 200; will assign to k some value (chosen arbitrarily) +for which fib(k) falls in the given range. +

11.1.2. Extreme Predicates in Dafny

+

In the previous subsection, I explained that a predicate declaration introduces a +well-founded predicate. The declarations for introducing extreme predicates are +inductive predicate and copredicate. Here is the definition of the least and +greatest solutions of $g$ from above, let's call them g and G: +

+
inductive predicate g(x: int) { x == 0 || g(x-2) }
+copredicate G(x: int) { x == 0 || G(x-2) }
+

When Dafny receives either of these definitions, it automatically declares the corresponding +prefix predicates. Instead of the names $\iter{g}_k$ and $\Iter{g}_k$ that I used above, Dafny +names the prefix predicates g#[k] and G#[k], respectively, that is, the name of +the extreme predicate appended with #, and the subscript is given as an argument in +square brackets. The definition of the prefix predicate derives from the body of +the extreme predicate and follows the form in (11) and (12). +Using a faux-syntax for illustrative purposes, here are the prefix +predicates that Dafny defines automatically from the extreme +predicates g and G: +

+
predicate g#[_k: nat](x: int) { _k != 0 && (x == 0 || g#[_k-1](x-2)) }
+predicate G#[_k: nat](x: int) { _k != 0 ==> (x == 0 || G#[_k-1](x-2)) }
+

The Dafny verifier is aware of the connection between extreme predicates and their +prefix predicates, (14) and (15). +

+

Remember that to be well defined, the defining functor of an extreme predicate +must be monotonic, and for (14) and (15) to hold, +the functor must be continuous. Dafny enforces the former of these by checking that +recursive calls of extreme predicates are in positive positions. The continuity +requirement comes down to checking that they are also in continuous positions: +that recursive calls to inductive predicates are +not inside unbounded universal quantifiers and that recursive calls to co-predicates +are not inside unbounded existential quantifiers [21, 26]. +

11.1.3. Proofs about Extreme Predicates

+

From what I have presented so far, we can do the formal proofs from Sections +11.0.2.0 and 11.0.2.1. Here is the +former: +

+
lemma EvenNat(x: int)
+  requires g(x)
+  ensures 0 <= x && x % 2 == 0
+{
+  var k: nat :| g#[k](x);
+  EvenNatAux(k, x);
+}
+lemma EvenNatAux(k: nat, x: int)
+  requires g#[k](x)
+  ensures 0 <= x && x % 2 == 0
+{
+  if x == 0 { } else { EvenNatAux(k-1, x-2); }
+}
+

Lemma EvenNat states the property we wish to prove. From its +precondition (keyword requires) and +(14), we know there is some k that will make the condition in the +assign-such-that statement true. Such a value is then assigned to k and passed to +the auxiliary lemma, which promises to establish the proof goal. Given the condition +g#[k](x), the definition of g# lets us conclude k != 0 as well as the disjunction +x == 0 || g#[k-1](x-2). The then branch considers the case of the first disjunct, +from which the proof goal follows automatically. The else branch can then assume +g#[k-1](x-2) and calls the induction hypothesis with those parameters. The proof +glue that shows the proof goal for x to follow from the proof goal with x-2 is +done automatically. +

+

Because Dafny automatically inserts the statement +

+
forall k', x' | 0 <= k' < k && g#[k'](x') {
+  EvenNatAux(k', x');
+}
+

at the beginning of the body of EvenNatAux, the body can be left empty and Dafny +completes the proof automatically. +

+

Here is the Dafny program that gives the proof from Section 11.0.2.1: +

+
lemma Always(x: int)
+  ensures G(x)
+{ forall k: nat { AlwaysAux(k, x); } }
+lemma AlwaysAux(k: nat, x: int)
+  ensures G#[k](x)
+{ }
+

While each of these proofs involves only basic proof rules, the setup feels a bit clumsy, +even with the empty body of the auxiliary lemmas. Moreover, +the proofs do not reflect the intuitive proofs I described in +Sections 11.0.2.0 and 11.0.2.1. +These shortcomings are addressed in the next subsection. +

11.1.4. Nicer Proofs of Extreme Predicates

+

The proofs we just saw follow standard forms: +use Skolemization to convert the inductive predicate into a prefix predicate for some k +and then do the proof inductively over k; respectively, +by induction over k, prove the prefix predicate for every k, then use +universal introduction to convert to the coinductive predicate. +With the declarations inductive lemma and colemma, Dafny offers to +set up the proofs +in these standard forms. What is gained is not just fewer characters in the program +text, but also a possible intuitive reading of the proofs. (Okay, to be fair, the +reading is intuitive for simpler proofs; complicated proofs may or may not be intuitive.) +

+

Somewhat analogous to the creation of prefix predicates from extreme predicates, Dafny +automatically creates a prefix lemma L# from each “extreme lemma” L. The pre- +and postconditions of a prefix lemma are copied from those of the extreme lemma, +except for the following replacements: +For an inductive lemma, Dafny looks in the precondition to find calls (in positive, continuous +positions) to inductive predicates P(x) and replaces these with P#[_k](x). +For a +co-lemma, Dafny looks in the postcondition to find calls (in positive, continuous positions) +to co-predicates P (including equality among coinductive datatypes, which is a built-in +co-predicate) and replaces these with P#[_k](x). +In each case, these predicates P are the lemma's focal predicates. +

+

The body of the extreme lemma is moved to the prefix lemma, but with +replacing each recursive +call L(x) with L#[_k-1](x) and replacing each occurrence of a call +to a focal predicate +P(x) with P#[_k-1](x). The bodies of the extreme lemmas are then replaced as shown +in the previous subsection. By construction, this new body correctly leads to the +extreme lemma's postcondition. +

+

Let us see what effect these rewrites have on how one can write proofs. Here are the proofs +of our running example: +

+
inductive lemma EvenNat(x: int)
+  requires g(x)
+  ensures 0 <= x && x % 2 == 0
+{ if x == 0 { } else { EvenNat(x-2); } }
+colemma Always(x: int)
+  ensures G(x)
+{ Always(x-2); }
+

Both of these proofs follow the intuitive proofs given in Sections +11.0.2.0 and 11.0.2.1. Note that in these +simple examples, the user is never bothered with prefix predicates or +prefix lemmas—the proofs just look like “what you'd expect”. +

+

Since Dafny automatically inserts calls to the induction hypothesis at the beginning of +each lemma, the bodies of the given extreme lemmas EvenNat and +Always can be empty and Dafny still completes the proofs. +Folks, it doesn't get any simpler than that! +

12. Class Types

+
ClassDecl = "class" { Attribute } ClassName [ GenericParameters ]
+  ["extends" Type {"," Type} ] 
+  "{" { { DeclModifier } ClassMemberDecl(moduleLevelDecl: false) } "}" 
+
ClassMemberDecl(moduleLevelDecl) = 
+  ( FieldDecl | FunctionDecl | 
+    MethodDecl(isGhost: ("ghost" was present), 
+               allowConstructor: !moduleLevelDecl) 
+  ) 
+

The ClassMemberDecl parameter moduleLevelDecl will be true if +the member declaration is at the top level or directly within a +module declaration. It will be false for ClassMemberDecls +that are part of a class or trait declaration. If moduleLevelDecl is +true, FieldDecls are not allowed. +

+

A class C is a reference type declared as follows: +

+
class C<T> extends J1, ..., Jn
+{
+  members
+}
+

where the list of type parameters T is optional and so is +“extends J1, ..., Jn”, which says that the class extends traits J1, ..., Jn. +The members of a class are fields, functions, and +methods. These are accessed or invoked by dereferencing a reference +to a C instance. +

+

A function or method is invoked on an instance +of C, unless the function or method is declared static. +A function or method that is not static is called an +instance function or method. +

+

An instance function or method takes an implicit receiver +parameter, namely, the instance used to access the member. In the +specification and body of an instance function or method, the receiver +parameter can be referred to explicitly by the keyword this. +However, in such places, members of this can also be mentioned +without any qualification. To illustrate, the qualified this.f and +the unqualified f refer to the same field of the same object in the +following example: +

+
class C {
+  var f: int
+  method Example() returns (b: bool)
+  {
+    b := f == this.f;
+  }
+}
+

so the method body always assigns true to the out-parameter b. +There is no semantic difference between qualified and +unqualified accesses to the same receiver and member. +

+

A C instance is created using new, for example: +

+
c := new C;
+

Note that new simply allocates a C object and returns a reference +to it; the initial values of its fields are arbitrary values of their +respective types. Therefore, it is common to invoke a method, known +as an initialization method, immediately after creation, for +example: +

+
c := new C;
+c.InitFromList(xs, 3);
+

When an initialization method has no out-parameters and modifies no +more than this, then the two statements above can be combined into +one: +

+
c := new C.InitFromList(xs, 3);
+

Note that a class can contain several initialization methods, that +these methods can be invoked at any time, not just as part of a new, +and that new does not require that an initialization method be +invoked at creation. +

+

A class can declare special initializing methods called constructor methods. +See Section 12.1. +

12.0. Field Declarations

+
FieldDecl = "var" { Attribute } FIdentType { "," FIdentType }
+

An FIdentType is used to declare a field. The field name is either an +identifier (that is not allowed to start with a leading underscore) or +some digits. Digits are used if you want to number your fields, e.g. “0”, +“1”, etc. +

+
FIdentType = ( FieldIdent | digits ) ":" Type 
+

A field x of some type T is declared as: +

+
var x: T
+

A field declaration declares one or more fields of the enclosing class. +Each field is a named part of the state of an object of that class. A +field declaration is similar to but distinct from a variable declaration +statement. Unlike for local variables and bound variables, the type is +required and will not be inferred. +

+

Unlike method and function declarations, a field declaration +cannot be given at the top level. Fields can be declared in either a +class or a trait. A class that inherits from multiple traits will +have all the fields declared in any of its parent traits. +

+

Fields that are declared as ghost can only be used in specifications, +not in code that will be compiled into executable code. +

+

Fields may not be declared static. +

+

protected is not allowed for fields. +

12.1. Method Declarations

+
MethodDecl(isGhost, allowConstructor) = 
+ MethodKeyword { Attribute } [ MethodName ]
+ (  MethodSignature(isGhost)  | SignatureEllipsis_ )
+ MethodSpec [ BlockStmt ]
+

The isGhost parameter is true iff the ghost keyword +preceded the method declaration. +

+

If the allowConstructor parameter is false then +the MethodDecl must not be a constructor +declaration. +

+
MethodKeyword = ("method" | "lemma" | "colemma"
+                | "inductive" "lemma" | "constructor" )
+

The method keyword is used to specify special kinds of methods +as explained below. +

+
MethodSignature(isGhost) = 
+    [ GenericParameters ] 
+    Formals(allowGhost: !isGhost) 
+    [ "returns" Formals(allowGhost: !isGhost) ]
+

A method signature specifies the method generic parameters, +input parameters and return parameters. +The formal parameters are not allowed to have ghost specified +if ghost was already specified for the method. +

+
SignatureEllipsis_ = "…"
+

A SignatureEllipsis_ is used when a method or function is being redeclared +in a module that refines another module. In that case the signature is +copied from the module that is being refined. This works because +Dafny does not support method or function overloading, so the +name of the class method uniquely identifies it without the +signature. +

+
Formals(allowGhostKeyword) = 
+  "(" [ GIdentType(allowGhostKeyword) 
+        { "," GIdentType(allowGhostKeyword) } ] ")" 
+

The Formals specifies the names and types of the method input or +output parameters. +

+

See section 4.1 for a description of MethodSpec. +

+

A method declaration adheres to the MethodDecl grammar above. +Here is an example of a method declaration. +

+
method {:att1}{:att2} M<T1, T2>(a: A, b: B, c: C) returns (x: X, y: Y, z: Z)
+  requires Pre
+  modifies Frame
+  ensures Post
+  decreases Rank
+{
+  Body
+}
+

where :att1 and :att2 are attributes of the method, +T1 and T2 are type parameters of the method (if generic), +a, b, c are the method’s in-parameters, x, y, z are the +method’s out-parameters, Pre is a boolean expression denoting the +method’s precondition, Frame denotes a set of objects whose fields may +be updated by the method, Post is a boolean expression denoting the +method’s postcondition, Rank is the method’s variant function, and +Body is a statement that implements the method. Frame can be a list +of expressions, each of which is a set of objects or a single object, the +latter standing for the singleton set consisting of that one object. The +method’s frame is the union of these sets, plus the set of objects +allocated by the method body. For example, if c and d are parameters +of a class type C, then +

+
modifies {c, d}
+
+modifies {c} + {d}
+
+modifies c, {d}
+
+modifies c, d
+

all mean the same thing. +

+

A method can be declared as ghost by preceding the declaration with the +keyword ghost. By default, a method has an implicit receiver parameter, +this. This parameter can be removed by preceding the method declaration +with the keyword static. A static method M in a class C can be invoked by +C.M(…). +

+

In a class, a method can be declared to be a constructor method by +replacing the keyword method with the keyword constructor. A constructor +can only be called at the time an object is allocated (see +object-creation examples below), and for a class that contains one or +more constructors, object creation must be done in conjunction with a +call to a constructor. +

+

An ordinary method is declared with the method keyword. +Section 12.1.0 explains methods that instead use the +constructor keyword. Section 12.1.1 discusses methods that are +declared with the lemma keyword. Methods declared with the inductive +lemma keywords are discussed later in the context of inductive +predicates (see 18.0). Methods declared with the +colemma keyword are discussed later in the context of co-inductive +types, in section 18.2.4.1. +

+

A method without a body is abstract. A method is allowed to be +abstract under the following circumstances: +

+
    +
  • It contains an {:axiom} attribute +
  • +
  • It contains an {:imported} attribute +
  • +
  • It contains a {:decl} attribute +
  • +
  • It is a declaration in an abstract module. +Note that when there is no body, Dafny assumes that the ensures +clauses are true without proof. +
+

12.1.0. Constructors

+

To write structured object-oriented programs, one often relies on that +objects are constructed only in certain ways. For this purpose, Dafny +provides constructor (method)s, which are a restricted form of +initialization methods. A constructor is declared with the keyword +constructor instead of method. When a class contains a +constructor, every call to new for that class must be accompanied +with a call to one of the constructors. Moreover, a constructor +cannot be called at other times, only during object creation. Other +than these restrictions, there is no semantic difference between using +ordinary initialization methods and using constructors. +

+

The Dafny design allows the constructors to be named, which promotes +using names like InitFromList above. Still, many classes have just +one constructor or have a typical constructor. Therefore, Dafny +allows one anonymous constructor, that is, a constructor whose name +is essentially “”. For example: +

+
class Item {
+  constructor (x: int, y: int)
+  // ...
+}
+

When invoking this constructor, the “.” is dropped, as in: +

+
m := new Item(45, 29);
+

Note that an anonymous constructor is just one way to name a +constructor; there can be other constructors as well. +

12.1.1. Lemmas

+

Sometimes there are steps of logic required to prove a program correct, +but they are too complex for Dafny to discover and use on its own. When +this happens, we can often give Dafny assistance by providing a lemma. +This is done by declaring a method with the lemma keyword. +Lemmas are implicitly ghost methods and the ghost keyword cannot +be applied to them. +

+

For an example, see the FibProperty lemma in +Section 11.1.1. +

+

See the Dafny Lemmas tutorial +for more examples and hints for using lemmas. +

12.2. Function Declarations

+
FunctionDecl = 
+  ( "function" [ "method" ] { Attribute }
+    FunctionName 
+    FunctionSignatureOrEllipsis_(allowGhostKeyword: ("method" present))
+  | "predicate" [ "method" ] { Attribute }
+    PredicateName 
+    PredicateSignatureOrEllipsis_(allowGhostKeyword: ("method" present))
+  | "inductive" "predicate" { Attribute }
+    PredicateName 
+    PredicateSignatureOrEllipsis_(allowGhostKeyword: false)
+  | "copredicate" { Attribute }
+    CopredicateName 
+    PredicateSignatureOrEllipsis_(allowGhostKeyword: false)
+  )
+  FunctionSpec [ FunctionBody ] 
+
+FunctionSignatureOrEllipsis_(allowGhostKeyword) =
+    FunctionSignature_ | SignatureEllipsis_ 
+FunctionSignature_(allowGhostKeyword) =
+    [ GenericParameters ] Formals(allowGhostKeyword) ":" Type 
+
+PredicateSignatureOrEllipsis_(allowGhostKeyword) =
+    PredicateSignature_(allowGhostKeyword) | SignatureEllipsis_ 
+PredicateSignature_(allowGhostKeyword) =
+    [ GenericParameters ] Formals(allowGhostKeyword)
+
+FunctionBody = "{" Expression(allowLemma: true, allowLambda: true) "}" 
+

In the above productions, allowGhostKeyword is true if the optional +“method” keyword was specified. This allows some of the +formal parameters of a function method to be specified as ghost. +

+

See section 4.2 for a description of FunctionSpec. +

+

A Dafny function is a pure mathematical function. It is allowed to +read memory that was specified in its reads expression but is not +allowed to have any side effects. +

+

Here is an example function declaration: +

+
function {:att1}{:att2} F<T1, T2>(a: A, b: B, c: C): T
+  requires Pre
+  reads Frame
+  ensures Post
+  decreases Rank
+{
+  Body
+}
+

where :att1 and :att2 are attributes of the function, if any, T1 +and T2 are type parameters of the function (if generic), a, b, c are +the function’s parameters, T is the type of the function’s result, +Pre is a boolean expression denoting the function’s precondition, +Frame denotes a set of objects whose fields the function body may +depend on, Post is a boolean expression denoting the function’s +postcondition, Rank is the function’s variant function, and Body is +an expression that defines the function return value. The precondition +allows a function to be partial, that is, the precondition says when the +function is defined (and Dafny will verify that every use of the function +meets the precondition). The postcondition is usually not needed, since +the body of the function gives the full definition. However, the +postcondition can be a convenient place to declare properties of the +function that may require an inductive proof to establish. For example: +

+
function Factorial(n: int): int
+  requires 0 <= n
+  ensures 1 <= Factorial(n)
+{
+  if n == 0 then 1 else Factorial(n-1) * n
+}
+

says that the result of Factorial is always positive, which Dafny +verifies inductively from the function body. To refer to the function’s +result in the postcondition, use the function itself, as shown in the +example. +

+

By default, a function is ghost, and cannot be called from non-ghost +code. To make it non-ghost, replace the keyword function with the two +keywords “function method”. +

+

By default, a function has an implicit receiver parameter, this. This +parameter can be removed by preceding the function declaration with the +keyword static. A static function F in a class C can be invoked +by C.F(…). This can give a convenient way to declare a number of helper +functions in a separate class. +

+

As for methods, a SignatureEllipsis_ is used when declaring +a function in a module refinement. For example, if module M0 declares +function F, a module M1 can be declared to refine M0 and +M1 can then refine F. The refinement function, M1.F, can have +a SignatureEllipsis_ which means to copy the signature from +M0.F. A refinement function can furnish a body for a function +(if M0.F does not provide one). It can also add ensures +clauses. And if F is a predicate, it can add conjuncts to +a previously given body. +

12.2.0. Function Transparency

+

A function is said to be transparent in a location if the +contents of the body of the function is visible at that point. +A function is said to be opaque at a location if it is not +transparent. However the FunctionSpec of a function +is always available. +

+

A function is usually transparent up to some unrolling level (up to +1, or maybe 2 or 3). If its arguments are all literals it is +transparent all the way. +

+

But the transparency of a function is affected by the following: +

+
    +
  • whether the function was declared to be protected, and +
  • +
  • whether the function was given the {:opaque} attribute (as explained +in Section 24.1.12). +
+ +

The following table summarizes where the function is transparent. +The module referenced in the table is the module in which the +function is defined. +

+ + + + + + + + +
Protected? {:opaque}? Transparent Transparent
Inside Outside
Module Module
N N Y Y
Y N Y N
N Y N N
+

When {:opaque} is specified for function g, g is opaque, +however the lemma reveal_g is available to give the semantics +of g whether in the defining module or outside. +

+

It currently is not allowed to have both protected and +{:opaque} specified for a function. +

12.2.1. Predicates

+

A function that returns a bool result is called a predicate. As an +alternative syntax, a predicate can be declared by replacing the function +keyword with the predicate keyword and omitting a declaration of the +return type. +

12.2.2. Inductive Predicates and Lemmas

+

See section 11.1.2 for descriptions +of inductive predicates and lemmas. +

13. Trait Types

+
TraitDecl = "trait" { Attribute } TraitName [ GenericParameters ]
+  "{" { { DeclModifier } ClassMemberDecl(moduleLevelDecl: false) } "}" 
+

A trait is an “abstract superclass”, or call it an “interface” or +“mixin”. Traits are new to Dafny and are likely to evolve for a +while. +

+

The declaration of a trait is much like that of a class: +

+
trait J
+{
+  members
+}
+

where members can include fields, functions, and methods, but +no constructor methods. The functions and methods are allowed to be +declared static. +

+

A reference type C that extends a trait J is assignable to J, but +not the other way around. The members of J are available as members +of C. A member in J is not allowed to be redeclared in C, +except if the member is a non-static function or method without a +body in J. By doing so, type C can supply a stronger +specification and a body for the member. +

+

new is not allowed to be used with traits. Therefore, there is no +object whose allocated type is a trait. But there can of course be +objects of a class C that implements a trait J, and a reference to +such a C object can be used as a value of type J. +

+

As an example, the following trait represents movable geometric shapes: +

+
trait Shape
+{
+  function method Width(): real
+    reads this
+  method Move(dx: real, dy: real)
+    modifies this
+  method MoveH(dx: real)
+    modifies this
+  {
+    Move(dx, 0.0);
+  }
+}
+

Members Width and Move are abstract (that is, bodiless) and can +be implemented differently by different classes that extend the trait. +The implementation of method MoveH is given in the trait and thus +gets used by all classes that extend Shape. Here are two classes +that each extend Shape: +

+
class UnitSquare extends Shape
+{
+  var x: real, y: real
+  function method Width(): real {  // note the empty reads clause
+    1.0
+  }
+  method Move(dx: real, dy: real)
+    modifies this
+  {
+    x, y := x + dx, y + dy;
+  }
+}
+class LowerRightTriangle extends Shape
+{
+  var xNW: real, yNW: real, xSE: real, ySE: real
+  function method Width(): real
+    reads this
+  {
+    xSE - xNW
+  }
+  method Move(dx: real, dy: real)
+    modifies this
+  {
+    xNW, yNW, xSE, ySE := xNW + dx, yNW + dy, xSE + dx, ySE + dy;
+  }
+}
+

Note that the classes can declare additional members, that they supply +implementations for the abstract members of the trait, +that they repeat the member signatures, and that they are responsible +for providing their own member specifications that both strengthen the +corresponding specification in the trait and are satisfied by the +provided body. +Finally, here is some code that creates two class instances and uses +them together as shapes: +

+
var myShapes: seq<Shape>;
+var A := new UnitSquare;
+myShapes := [A];
+var tri := new LowerRightTriangle;
+// myShapes contains two Shape values, of different classes
+myShapes := myShapes + [tri];
+// move shape 1 to the right by the width of shape 0
+myShapes[1].MoveH(myShapes[0].Width());

14. Array Types

+
ArrayType_ = arrayToken [ GenericInstantiation ] 
+

Dafny supports mutable fixed-length array types of any positive +dimension. Array types are reference types. +

14.0. One-dimensional arrays

+

A one-dimensional array of n T elements is created as follows: +

+
a := new T[n];
+

The initial values of the array elements are arbitrary values of type +T. +The length of an array is retrieved using the immutable Length +member. For example, the array allocated above satisfies: +

+
a.Length == n
+

For any integer-based numeric i in the range 0 <= i < a.Length, +the array selection expression a[i] retrieves element i (that +is, the element preceded by i elements in the array). The +element stored at i can be changed to a value t using the array +update statement: +

+
a[i] := t;
+

Caveat: The type of the array created by new T[n] is +array<T>. A mistake that is simple to make and that can lead to +befuddlement is to write array<T> instead of T after new. +For example, consider the following: +

+
var a := new array<T>;
+var b := new array<T>[n];
+var c := new array<T>(n);  // resolution error
+var d := new array(n);  // resolution error
+

The first statement allocates an array of type array<T>, but of +unknown length. The second allocates an array of type +array<array<T>> of length n, that is, an array that holds n +values of type array<T>. The third statement allocates an +array of type array<T> and then attempts to invoke an anonymous +constructor on this array, passing argument n. Since array has no +constructors, let alone an anonymous constructor, this statement +gives rise to an error. If the type-parameter list is omitted for a +type that expects type parameters, Dafny will attempt to fill these +in, so as long as the array type parameter can be inferred, it is +okay to leave off the “<T>” in the fourth statement above. However, +as with the third statement, array has no anonymous constructor, so +an error message is generated. +

+

One-dimensional arrays support operations that convert a stretch of +consecutive elements into a sequence. For any array a of type +array<T>, integer-based numerics lo and hi satisfying +0 <= lo <= hi <= a.Length, the following operations each yields a +seq<T>: +

+ + + + + + + +
expression description
a[lo..hi] subarray conversion to sequence
a[lo..] drop
a[..hi] take
a[..] array conversion to sequence
+

The expression a[lo..hi] takes the first hi elements of the array, +then drops the first lo elements thereof and returns what remains as +a sequence. The resulting sequence thus has length hi - lo. +The other operations are special instances of the first. If lo is +omitted, it defaults to 0 and if hi is omitted, it defaults to +a.Length. +In the last operation, both lo and hi have been omitted, thus +a[..] returns the sequence consisting of all the array elements of +a. +

+

The subarray operations are especially useful in specifications. For +example, the loop invariant of a binary search algorithm that uses +variables lo and hi to delimit the subarray where the search key +may be still found can be expressed as follows: +

+
key !in a[..lo] && key !in a[hi..]
+

Another use is to say that a certain range of array elements have not +been changed since the beginning of a method: +

+
a[lo..hi] == old(a[lo..hi])
+

or since the beginning of a loop: +

+
ghost var prevElements := a[..];
+while // ...
+  invariant a[lo..hi] == prevElements[lo..hi]
+{
+  // ...
+}
+

Note that the type of prevElements in this example is seq<T>, if +a has type array<T>. +

+

A final example of the subarray operation lies in expressing that an +array's elements are a permutation of the array's elements at the +beginning of a method, as would be done in most sorting algorithms. +Here, the subarray operation is combined with the sequence-to-multiset +conversion: +

+
multiset(a[..]) == multiset(old(a[..]))

14.1. Multi-dimensional arrays

+

An array of 2 or more dimensions is mostly like a one-dimensional +array, except that new takes more length arguments (one for each +dimension), and the array selection expression and the array update +statement take more indices. For example: +

+
matrix := new T[m, n];
+matrix[i, j], matrix[x, y] := matrix[x, y], matrix[i, j];
+

create a 2-dimensional array whose dimensions have lengths m and +n, respectively, and then swaps the elements at i,j and x,y. +The type of matrix is array2<T>, and similarly for +higher-dimensional arrays (array3<T>, array4<T>, etc.). Note, +however, that there is no type array0<T>, and what could have been +array1<T> is actually named just array<T>. +

+

The new operation above requires m and n to be non-negative +integer-based numerics. These lengths can be retrieved using the +immutable fields Length0 and Length1. For example, the following +holds of the array created above: +

+
matrix.Length0 == m && matrix.Length1 == n
+

Higher-dimensional arrays are similar (Length0, Length1, +Length2, …). The array selection expression and array update +statement require that the indices are in bounds. For example, the +swap statement above is well-formed only if: +

+
0 <= i < matrix.Length0 && 0 <= j < matrix.Length1 &&
+0 <= x < matrix.Length0 && 0 <= y < matrix.Length1
+

In contrast to one-dimensional arrays, there is no operation to +convert stretches of elements from a multi-dimensional array to a +sequence. +

15. Type object

+
ObjectType_ = "object"
+

There is a built-in trait object that is like a supertype of all +reference types.6 Every class automatically extends +object and so does every user-defined trait. The purpose of type object +is to enable a uniform treatment of dynamic frames. In particular, it +is useful to keep a ghost field (typically named Repr for +“representation”) of type set<object>. +

16. Iterator types

+
IteratorDecl = "iterator" { Attribute } IteratorName
+  ( [ GenericParameters ] 
+    Formals(allowGhostKeyword: true)
+    [ "yields" Formals(allowGhostKeyword: true) ]
+  | "…"
+  )
+  IteratorSpec [ BlockStmt ] 
+

See section 4.4 for a description of IteratorSpec. +

+

An iterator provides a programming abstraction for writing code that +iteratively returns elements. These CLU-style iterators are +co-routines in the sense that they keep track of their own program +counter and control can be transferred into and out of the iterator +body. +

+

An iterator is declared as follows: +

+
iterator Iter<T>(in-params) yields (yield-params)
+  specification
+{
+  body
+}
+

where T is a list of type parameters (as usual, if there are no type +parameters, “<T>” is omitted). This declaration gives rise to a +reference type with the same name, Iter<T>. In the signature, +in-parameters and yield-parameters are the iterator's analog of a +method's in-parameters and out-parameters. The difference is that the +out-parameters of a method are returned to a caller just once, whereas +the yield-parameters of an iterator are returned each time the iterator +body performs a yield. The body consists of statements, like in a +method body, but with the availability also of yield statements. +

+

From the perspective of an iterator client, the iterator declaration +can be understood as generating a class Iter<T> with various +members, a simplified version of which is described next. +

+

The Iter<T> class contains an anonymous constructor whose parameters +are the iterator's in-parameters: +

+
predicate Valid()
+constructor (in-params)
+  modifies this
+  ensures Valid()
+

An iterator is created using new and this anonymous constructor. +For example, an iterator willing to return ten consecutive integers +from start can be declared as follows: +

+
iterator Gen(start: int) yields (x: int)
+{
+  var i := 0;
+  while i < 10 {
+    x := start + i;
+    yield;
+    i := i + 1;
+  }
+}
+

An instance of this iterator is created using: +

+
iter := new Gen(30);
+

The predicate Valid() says when the iterator is in a state where one +can attempt to compute more elements. It is a postcondition of the +constructor and occurs in the specification of the MoveNext member: +

+
method MoveNext() returns (more: bool)
+  requires Valid()
+  modifies this
+  ensures more ==> Valid()
+

Note that the iterator remains valid as long as MoveNext returns +true. Once MoveNext returns false, the MoveNext method can no +longer be called. Note, the client is under no obligation to keep +calling MoveNext until it returns false, and the body of the +iterator is allowed to keep returning elements forever. +

+

The in-parameters of the iterator are stored in immutable fields of +the iterator class. To illustrate in terms of the example above, the +iterator class Gen contains the following field: +

+
var start: int
+

The yield-parameters also result in members of the iterator class: +

+
var x: int
+

These fields are set by the MoveNext method. If MoveNext returns +true, the latest yield values are available in these fields and the +client can read them from there. +

+

To aid in writing specifications, the iterator class also contains +ghost members that keep the history of values returned by +MoveNext. The names of these ghost fields follow the names of the +yield-parameters with an “s” appended to the name (to suggest +plural). Name checking rules make sure these names do not give rise +to ambiguities. The iterator class for Gen above thus contains: +

+
ghost var xs: seq<int>
+

These history fields are changed automatically by MoveNext, but are +not assignable by user code. +

+

Finally, the iterator class contains some special fields for use in +specifications. In particular, the iterator specification gets +recorded in the following immutable fields: +

+
ghost var _reads: set<object>
+ghost var _modifies: set<object>
+ghost var _decreases0: T0
+ghost var _decreases1: T1
+// ...
+

where there is a _decreasesi: Ti field for each +component of the iterator's decreases +clause.7 +In addition, there is a field: +

+
ghost var _new: set<object>;
+

to which any objects allocated on behalf of the iterator body get +added. The iterator body is allowed to remove elements from the +_new set, but cannot by assignment to _new add any elements. +

+

Note, in the precondition of the iterator, which is to hold upon +construction of the iterator, the in-parameters are indeed +in-parameters, not fields of this. +

+

It's regrettably tricky to use iterators. The language really +ought to have a foreach statement to make this easier. +Here is an example showing definition and use of an iterator. +

+
iterator Iter<T>(s: set<T>) yields (x: T)
+  yield ensures x in s && x !in xs[..|xs|-1];
+  ensures s == set z | z in xs;
+{
+  var r := s;
+  while (r != {})
+    invariant forall z :: z in xs ==> x !in r;  // r and xs are disjoint
+    invariant s == r + set z | z in xs;
+  {
+    var y :| y in r;
+    r, x := r - {y}, y;
+    yield;
+    assert y == xs[|xs|-1];  // needed as a lemma to prove loop invariant
+  }
+}
+
+method UseIterToCopy<T>(s: set<T>) returns (t: set<T>)
+  ensures s == t;
+{
+  t := {};
+  var m := new Iter(s);
+  while (true)
+    invariant m.Valid() && fresh(m._new);
+    invariant t == set z | z in m.xs;
+    decreases s - t;
+  {
+    var more := m.MoveNext();
+    if (!more) { break; }
+    t := t + {m.x};
+  }
+}
+ + +

17. Function types

+
Type = DomainType "->" Type  
+

Functions are first-class values in Dafny. Function types have the form +(T) -> U where T is a comma-delimited list of types and U is a +type. T is called the function's domain type(s) and U is its +range type. For example, the type of a function +

+
function F(x: int, b: bool): real
+

is (int, bool) -> real. Parameters are not allowed to be ghost. +

+

To simplify the appearance of the basic case where a function's +domain consists of a list of exactly one type, the parentheses around +the domain type can be dropped in this case, as in T -> U. +This innocent simplification requires additional explanation in the +case where that one type is a tuple type, since tuple types are also +written with enclosing parentheses. +If the function takes a single argument that is a tuple, an additional +set of parentheses is needed. For example, the function +

+
function G(pair: (int, bool)): real
+

has type ((int, bool)) -> real. Note the necessary double +parentheses. Similarly, a function that takes no arguments is +different from one that takes a 0-tuple as an argument. For instance, +the functions +

+
function NoArgs(): real
+function Z(unit: ()): real
+

have types () -> real and (()) -> real, respectively. +

+

The function arrow, ->, is right associative, so A -> B -> C means +A -> (B -> C). The other association requires explicit parentheses: +(A -> B) -> C. +

+

Note that the receiver parameter of a named function is not part of +the type. Rather, it is used when looking up the function and can +then be thought of as being captured into the function definition. +For example, suppose function F above is declared in a class C and +that c references an object of type C; then, the following is type +correct: +

+
var f: (int, bool) -> real := c.F;
+

whereas it would have been incorrect to have written something like: +

+
var f': (C, int, bool) -> real := F;  // not correct
+

Outside its type signature, each function value has three properties, +described next. +

+

Every function implicitly takes the heap as an argument. No function +ever depends on the entire heap, however. A property of the +function is its declared upper bound on the set of heap locations it +depends on for a given input. This lets the verifier figure out that +certain heap modifications have no effect on the value returned by a +certain function. For a function f: T -> U and a value t of type +T, the dependency set is denoted f.reads(t) and has type +set<object>. +

+

The second property of functions stems from the fact that every function +is potentially partial. In other words, a property of a function is its +precondition. For a function f: T -> U, the precondition of f for a +parameter value t of type T is denoted f.requires(t) and has type +bool. +

+

The third property of a function is more obvious—the function's +body. For a function f: T -> U, the value that the function yields +for an input t of type T is denoted f(t) and has type U. +

+

Note that f.reads and f.requires are themselves functions. +Suppose f has type T -> U and t has type T. Then, f.reads +is a function of type T -> set<object> whose reads and requires +properties are: +

+
f.reads.reads(t) == f.reads(t)
+f.reads.requires(t) == true
+

f.requires is a function of type T -> bool whose reads and +requires properties are: +

+
f.requires.reads(t) == f.reads(t)
+f.requires.requires(t) == true
+

Dafny also supports anonymous functions by means of +lambda expressions. See section 22.9. +

18. Algebraic Datatypes

+

Dafny offers two kinds of algebraic datatypes, those defined +inductively and those defined co-inductively. The salient property of +every datatype is that each value of the type uniquely identifies one +of the datatype's constructors and each constructor is injective in +its parameters. +

+
DatatypeDecl = ( InductiveDatatypeDecl | CoinductiveDatatypeDecl ) 

18.0. Inductive datatypes

+
InductiveDatatypeDecl_ = "datatype" { Attribute } DatatypeName [ GenericParameters ]
+  "=" DatatypeMemberDecl { "|" DatatypeMemberDecl } [ ";" ] 
+DatatypeMemberDecl = { Attribute } DatatypeMemberName [ FormalsOptionalIds ] 
+

The values of inductive datatypes can be seen as finite trees where +the leaves are values of basic types, numeric types, reference types, +co-inductive datatypes, or function types. Indeed, values of +inductive datatypes can be compared using Dafny's well-founded +< ordering. +

+

An inductive datatype is declared as follows: +

+
datatype D<T> = Ctors
+

where Ctors is a nonempty |-separated list of +(datatype) constructors for the datatype. Each constructor has the +form: +

+
C(params)
+

where params is a comma-delimited list of types, optionally +preceded by a name for the parameter and a colon, and optionally +preceded by the keyword ghost. If a constructor has no parameters, +the parentheses after the constructor name can be omitted. If no +constructor takes a parameter, the type is usually called an +enumeration; for example: +

+
datatype Friends = Agnes | Agatha | Jermaine | Jack
+

For every constructor C, Dafny defines a discriminator C?, which +is a member that returns true if and only if the datatype value has +been constructed using C. For every named parameter p of a +constructor C, Dafny defines a destructor p, which is a member +that returns the p parameter from the C call used to construct the +datatype value; its use requires that C? holds. For example, for +the standard List type +

+
datatype List<T> = Nil | Cons(head: T, tail: List<T>)
+

the following holds: +

+
Cons(5, Nil).Cons? && Cons(5, Nil).head == 5
+

Note that the expression +

+
Cons(5, Nil).tail.head
+

is not well-formed, since Cons(5, Nil).tail does not satisfy +Cons?. +

+

The names of the destructors must be unique across all the +constructors of the datatype. A constructor can have the same name as +the enclosing datatype; this is especially useful for +single-constructor datatypes, which are often called +record types. For example, a record type for black-and-white pixels +might be represented as follows: +

+
datatype Pixel = Pixel(x: int, y: int, on: bool)
+

To call a constructor, it is usually necessary only to mention the +name of the constructor, but if this is ambiguous, it is always +possible to qualify the name of constructor by the name of the +datatype. For example, Cons(5, Nil) above can be written +

+
List.Cons(5, List.Nil)
+

As an alternative to calling a datatype constructor explicitly, a +datatype value can be constructed as a change in one parameter from a +given datatype value using the datatype update expression. For any +d whose type is a datatype that includes a constructor C that has +a parameter (destructor) named f of type T, and any expression t +of type T, +

+
d[f := t]
+

constructs a value like d but whose f parameter is t. The +operation requires that d satisfies C?. For example, the +following equality holds: +

+
Cons(4, Nil)[tail := Cons(3, Nil)] == Cons(4, Cons(3, Nil))
+

The datatype update expression also accepts multiple field +names, provided these are distinct. For example, a node of some +inductive datatype for trees may be updated as follows: +

+
node[left := L, right := R]

18.1. Tuple types

+
TupleType_ = "(" [ Type { "," Type } ] ")" 
+

Dafny builds in record types that correspond to tuples and gives these +a convenient special syntax, namely parentheses. For example, what +might have been declared as: +

+
datatype Pair<T,U> = Pair(0: T, 1: U)
+

Dafny provides as the type (T, U) and the constructor (t, u), as +if the datatype's name were “” and its type arguments are given in +round parentheses, and as if the constructor name were “”. Note that +the destructor names are 0 and 1, which are legal identifier names +for members. For example, showing the use of a tuple destructor, here +is a property that holds of 2-tuples (that is, pairs): +

+
(5, true).1 == true
+

Dafny declares n-tuples where n is 0 or 2 or up. There are no +1-tuples, since parentheses around a single type or a single value have +no semantic meaning. The 0-tuple type, (), is often known as the +unit type and its single value, also written (), is known as unit. +

18.2. Co-inductive datatypes

+
CoinductiveDatatypeDecl_ = "codatatype" { Attribute } DatatypeName [ GenericParameters ]
+  "=" DatatypeMemberDecl { "|" DatatypeMemberDecl } [ ";" ] 
+

Whereas Dafny insists that there is a way to construct every inductive +datatype value from the ground up, Dafny also supports +co-inductive datatypes, whose constructors are evaluated lazily and +hence allows infinite structures. A co-inductive datatype is declared +using the keyword codatatype; other than that, it is declared and +used like an inductive datatype. +

+

For example, +

+
codatatype IList<T> = Nil | Cons(head: T, tail: IList<T>)
+codatatype Stream<T> = More(head: T, tail: Stream<T>)
+codatatype Tree<T> = Node(left: Tree<T>, value: T, right: Tree<T>)
+

declare possibly infinite lists (that is, lists that can be either +finite or infinite), infinite streams (that is, lists that are always +infinite), and infinite binary trees (that is, trees where every +branch goes on forever), respectively. +

+

The paper Co-induction Simply, by Leino and +Moskal[20], explains Dafny's implementation and +verification of co-inductive types. We capture the key features from that +paper in this section but the reader is referred to that paper for more +complete details and to supply bibliographic references that we have +omitted. +

+

Mathematical induction is a cornerstone of programming and program +verification. It arises in data definitions (e.g., some algebraic data +structures can be described using induction), it underlies program +semantics (e.g., it explains how to reason about finite iteration and +recursion), and it gets used in proofs (e.g., supporting lemmas about +data structures use inductive proofs). Whereas induction deals with +finite things (data, behavior, etc.), its dual, co-induction, deals with +possibly infinite things. Co-induction, too, is important in programming +and program verification, where it arises in data definitions (e.g., lazy +data structures), semantics (e.g., concurrency), and proofs (e.g., +showing refinement in a co-inductive big-step semantics). It is thus +desirable to have good support for both induction and co-induction in a +system for constructing and reasoning about programs. +

+

Co-datatypes and co-recursive functions make it possible to use lazily +evaluated data structures (like in Haskell or Agda). Co-predicates, +defined by greatest fix-points, let programs state properties of such +data structures (as can also be done in, for example, Coq). For the +purpose of writing co-inductive proofs in the language, we introduce +co-lemmas. Ostensibly, a co-lemma invokes the co-induction hypothesis +much like an inductive proof invokes the induction hypothesis. Underneath +the hood, our co-inductive proofs are actually approached via induction: +co-lemmas provide a syntactic veneer around this approach. +

+

The following example gives a taste of how the co-inductive features in +Dafny come together to give straightforward definitions of infinite +matters. +

+
// infinite streams
+codatatype IStream<T> = ICons(head: T, tail: IStream)
+
+// pointwise product of streams
+function Mult(a: IStream<int>, b: IStream<int>): IStream<int>
+{ ICons(a.head * b.head, Mult(a.tail, b.tail)) }
+
+// lexicographic order on streams
+copredicate Below(a: IStream<int>, b: IStream<int>)
+{ a.head <= b.head && ((a.head == b.head) ==> Below(a.tail, b.tail)) }
+
+// a stream is Below its Square
+colemma Theorem_BelowSquare(a: IStream<int>)
+ensures Below(a, Mult(a, a))
+{ assert a.head <= Mult(a, a).head;
+  if a.head == Mult(a, a).head { 
+    Theorem_BelowSquare(a.tail);
+  }
+}
+
+// an incorrect property and a bogus proof attempt
+colemma NotATheorem_SquareBelow(a: IStream<int>)
+  ensures Below(Mult(a, a), a); // ERROR
+{
+  NotATheorem_SquareBelow(a);
+}
+

It defines a type IStream of infinite streams, with constructor ICons and +destructors head and tail. Function Mult performs pointwise +multiplication on infinite streams of integers, defined using a +co-recursive call (which is evaluated lazily). Co-predicate Below is +defined as a greatest fix-point, which intuitively means that the +co-predicate will take on the value true if the recursion goes on forever +without determining a different value. The co-lemma states the theorem +Below(a, Mult(a, a)). Its body gives the proof, where the recursive +invocation of the co-lemma corresponds to an invocation of the +co-induction hypothesis. +

+

The proof of the theorem stated by the first co-lemma lends +itself to the following intuitive reading: To prove that a is below +Mult(a, a), check that their heads are ordered and, if the heads are +equal, also prove that the tails are ordered. The second co-lemma states +a property that does not always hold; the verifier is not fooled by the +bogus proof attempt and instead reports the property as unproved. +

+

We argue that these definitions in Dafny are simple enough to level the +playing field between induction (which is familiar) and co-induction +(which, despite being the dual of induction, is often perceived as eerily +mysterious). Moreover, the automation provided by our SMT-based verifier +reduces the tedium in writing co-inductive proofs. For example, it +verifies Theorem_BelowSquare from the program text given above— no +additional lemmas or tactics are needed. In fact, as a consequence of the +automatic-induction heuristic in Dafny, the verifier will +automatically verify Theorem_BelowSquare even given an empty body. +

+

Just like there are restrictions on when an inductive hypothesis can be +invoked, there are restrictions on how a co-inductive hypothesis can be +used. These are, of course, taken into consideration by our verifier. +For example, as illustrated by the second co-lemma above, invoking the +co-inductive hypothesis in an attempt to obtain the entire proof goal is +futile. (We explain how this works in section 18.2.4.1.) Our initial experience +with co-induction in Dafny shows it to provide an intuitive, low-overhead +user experience that compares favorably to even the best of today’s +interactive proof assistants for co-induction. In addition, the +co-inductive features and verification support in Dafny have other +potential benefits. The features are a stepping stone for verifying +functional lazy programs with Dafny. Co-inductive features have also +shown to be useful in defining language semantics, as needed to verify +the correctness of a compiler, so this opens the possibility that +such verifications can benefit from SMT automation. +

18.2.0. Well-Founded Function/Method Definitions

+

The Dafny programming language supports functions and methods. A function +in Dafny is a mathematical function (i.e., it is well-defined, +deterministic, and pure), whereas a method is a body of statements that +can mutate the state of the program. A function is defined by its given +body, which is an expression. To ensure that function definitions +are mathematically consistent, Dafny insists that recursive calls be well-founded, +enforced as follows: Dafny computes the call graph of functions. The strongly connected +components within it are clusters of mutually recursive definitions arranged in +a DAG. This stratifies the functions so that a call from one cluster in the DAG to a +lower cluster is allowed arbitrarily. For an intra-cluster call, Dafny prescribes a proof +obligation that gets taken through the program verifier’s reasoning engine. Semantically, +each function activation is labeled by a rank—a lexicographic tuple determined +by evaluating the function’s decreases clause upon invocation of the function. The +proof obligation for an intra-cluster call is thus that the rank of the callee is strictly less +(in a language-defined well-founded relation) than the rank of the caller. Because +these well-founded checks correspond to proving termination of executable code, we +will often refer to them as “termination checks”. The same process applies to methods. +

+

Lemmas in Dafny are commonly introduced by declaring a method, stating +the property of the lemma in the postcondition (keyword ensures) of +the method, perhaps restricting the domain of the lemma by also giving a +precondition (keyword requires), and using the lemma by invoking +the method. Lemmas are stated, used, and proved as methods, but +since they have no use at run time, such lemma methods are typically +declared as ghost, meaning that they are not compiled into code. The +keyword lemma introduces such a method. Control flow statements +correspond to proof techniques—case splits are introduced with if +statements, recursion and loops are used for induction, and method calls +for structuring the proof. Additionally, the statement: +

+
forall x | P(x) { Lemma(x); }
+

is used to invoke Lemma(x) on all x for which P(x) holds. If +Lemma(x) ensures Q(x), then the forall statement establishes +

+
forall x :: P(x) ==> Q(x).

18.2.1. Defining Co-inductive Datatypes

+

Each value of an inductive datatype is finite, in the sense that it can +be constructed by a finite number of calls to datatype constructors. In +contrast, values of a co-inductive datatype, or co-datatype for short, +can be infinite. For example, a co-datatype can be used to represent +infinite trees. +

+

Syntactically, the declaration of a co-datatype in Dafny looks like that +of a datatype, giving prominence to the constructors (following Coq). The +following example defines a co-datatype Stream of possibly +infinite lists. +

+
codatatype Stream<T> = SNil | SCons(head: T, tail: Stream)
+function Up(n: int): Stream<int> { SCons(n, Up(n+1)) }
+function FivesUp(n: int): Stream<int>
+  decreases 4 - (n - 1) % 5
+{ 
+  if (n % 5 == 0) then
+    SCons(n, FivesUp(n+1))
+  else
+    FivesUp(n+1)
+}
+

Stream is a co-inductive datatype whose values are possibly infinite +lists. Function Up returns a stream consisting of all integers upwards +of n and FivesUp returns a stream consisting of all multiples of 5 +upwards of n . The self-call in Up and the first self-call in FivesUp +sit in productive positions and are therefore classified as co-recursive +calls, exempt from termination checks. The second self-call in FivesUp is +not in a productive position and is therefore subject to termination +checking; in particular, each recursive call must decrease the rank +defined by the decreases clause. +

+

Analogous to the common finite list datatype, Stream declares two +constructors, SNil and SCons. Values can be destructed using match +expressions and statements. In addition, like for inductive datatypes, +each constructor C automatically gives rise to a discriminator C? and +each parameter of a constructor can be named in order to introduce a +corresponding destructor. For example, if xs is the stream +SCons(x, ys), then xs.SCons? and xs.head == x hold. In contrast +to datatype declarations, there is no grounding check for +co-datatypes—since a codatatype admits infinite values, the type is +nevertheless inhabited. +

18.2.2. Creating Values of Co-datatypes

+

To define values of co-datatypes, one could imagine a “co-function” +language feature: the body of a “co-function” could include possibly +never-ending self-calls that are interpreted by a greatest fix-point +semantics (akin to a CoFixpoint in Coq). Dafny uses a different design: +it offers only functions (not “co-functions”), but it classifies each +intra-cluster call as either recursive or co-recursive. Recursive calls +are subject to termination checks. Co-recursive calls may be +never-ending, which is what is needed to define infinite values of a +co-datatype. For example, function Up(n) in the preceding example is defined as the +stream of numbers from n upward: it returns a stream that starts with n +and continues as the co-recursive call Up(n + 1).

+

To ensure that co-recursive calls give rise to mathematically consistent definitions, +they must occur only in productive positions. This says that it must be possible to determine +each successive piece of a co-datatype value after a finite amount of work. This +condition is satisfied if every co-recursive call is syntactically guarded by a constructor +of a co-datatype, which is the criterion Dafny uses to classify intra-cluster calls as being +either co-recursive or recursive. Calls that are classified as co-recursive are exempt from +termination checks. +

+

A consequence of the productivity checks and termination checks is that, even in the +absence of talking about least or greatest fix-points of self-calling functions, all functions +in Dafny are deterministic. Since there is no issue of several possible fix-points, +the language allows one function to be involved in both recursive and co-recursive calls, +as we illustrate by the function FivesUp. +

18.2.3. Copredicates

+

Determining properties of co-datatype values may require an infinite +number of observations. To that avail, Dafny provides co-predicates +which are function declarations that use the copredicate keyword. +Self-calls to a co-predicate need not terminate. Instead, the value +defined is the greatest fix-point of the given recurrence equations. +Continuing the preceding example, the following code defines a +co-predicate that holds for exactly those streams whose payload consists +solely of positive integers. The co-predicate definition implicitly also +gives rise to a corresponding prefix predicate, Pos#. The syntax for +calling a prefix predicate sets apart the argument that specifies the +prefix length, as shown in the last line; for this figure, we took the +liberty of making up a coordinating syntax for the signature of the +automatically generated prefix predicate (which is not part of +Dafny syntax). +

+
copredicate Pos(s: Stream<int>)
+{
+  match s
+  case SNil => true
+  case SCons(x, rest) => x > 0 && Pos(rest)
+}
+// Automatically generated by the Dafny compiler:
+predicate Pos#[_k: nat](s: Stream<int>)
+  decreases _k
+{ if _k == 0 then true else
+  match s
+  case SNil => true
+  case SCons(x, rest) => x > 0 && Pos#[_k-1](rest) 
+}
+

Some restrictions apply. To guarantee that the greatest fix-point always +exists, the (implicit functor defining the) co-predicate must be +monotonic. This is enforced by a syntactic restriction on the form of the +body of co-predicates: after conversion to negation normal form (i.e., +pushing negations down to the atoms), intra-cluster calls of +co-predicates must appear only in positive positions—that is, they must +appear as atoms and must not be negated. Additionally, to guarantee +soundness later on, we require that they appear in co-friendly +positions—that is, in negation normal form, when they appear under +existential quantification, the quantification needs to be limited to a +finite range8. Since the evaluation of a co-predicate might not +terminate, co-predicates are always ghost. There is also a restriction on +the call graph that a cluster containing a co-predicate must contain only +co-predicates, no other kinds of functions. +

+

A copredicate declaration of P defines not just a co-predicate, but +also a corresponding prefix predicate P#. A prefix predicate is a +finite unrolling of a co-predicate. The prefix predicate is constructed +from the co-predicate by +

+
    +
  • +

    adding a parameter _k of type nat to denote the prefix length, +

  • +
  • +

    adding the clause “decreases _k;” to the prefix predicate (the +co-predicate itself is not allowed to have a decreases clause), +

  • +
  • +

    replacing in the body of the co-predicate every intra-cluster +call Q(args) to a copredicate by a call Q#[_k - 1](args) +to the corresponding prefix predicate, and then +

  • +
  • +

    prepending the body with if _k == 0 then true else. +

+ +

For example, for co-predicate Pos, the definition of the prefix +predicate Pos# is as suggested above. Syntactically, the prefix-length +argument passed to a prefix predicate to indicate how many times to +unroll the definition is written in square brackets, as in Pos#[k](s). +In the Dafny grammar this is called a HashCall. The definition of +Pos# is available only at clusters strictly higher than that of Pos; +that is, Pos and Pos# must not be in the same cluster. In other +words, the definition of Pos cannot depend on Pos#. +

18.2.3.0. Co-Equality
+

Equality between two values of a co-datatype is a built-in co-predicate. +It has the usual equality syntax s == t, and the corresponding prefix +equality is written s ==#[k] t. And similarly for s != t +and s !=#[k] t. +

18.2.4. Co-inductive Proofs

+

From what we have said so far, a program can make use of properties of +co-datatypes. For example, a method that declares Pos(s) as a +precondition can rely on the stream s containing only positive integers. +In this section, we consider how such properties are established in the +first place. +

18.2.4.0. Properties About Prefix Predicates
+

Among other possible strategies for establishing co-inductive properties +we take the time-honored approach of reducing co-induction to +induction. More precisely, Dafny passes to the SMT solver an +assumption D(P) for every co-predicate P, where: +

+
D(P) = ∀ x • P(x) <==> ∀ k • P#[k](x)
+

In other words, a co-predicate is true iff its corresponding prefix +predicate is true for all finite unrollings. +

+

In Sec. 4 of the paper Co-induction Simply a soundness theorem of such +assumptions is given, provided the co-predicates meet the co-friendly +restrictions. An example proof of Pos(Up(n)) for every n > 0 is +here shown: +

+
lemma UpPosLemma(n: int)
+  requires n > 0
+  ensures Pos(Up(n))
+{
+  forall k | 0 <= k { UpPosLemmaK(k, n); } 
+}
+
+lemma UpPosLemmaK(k: nat, n: int)
+  requires n > 0
+  ensures Pos#[k](Up(n))
+  decreases k
+{ 
+  if k != 0 {
+    // this establishes Pos#[k-1](Up(n).tail)
+    UpPosLemmaK(k-1, n+1);
+  }
+}
+

The lemma UpPosLemma proves Pos(Up(n)) for every n > 0. We first +show Pos#[k](Up(n)), for n > 0 and an arbitrary k, and then use +the forall statement to show ∀ k • Pos#[k](Up(n)). Finally, the axiom +D(Pos) is used (automatically) to establish the co-predicate. +

18.2.4.1. Colemmas
+

As we just showed, with help of the D axiom we can now prove a +co-predicate by inductively proving that the corresponding prefix +predicate holds for all prefix lengths k . In this section, we introduce +co-lemma declarations, which bring about two benefits. The first benefit +is that co-lemmas are syntactic sugar and reduce the tedium of having to +write explicit quantifications over k . The second benefit is that, in +simple cases, the bodies of co-lemmas can be understood as co-inductive +proofs directly. As an example consider the following co-lemma. +

+
colemma UpPosLemma(n: int)
+  requires n > 0
+  ensures Pos(Up(n))
+{ 
+  UpPosLemma(n+1);
+}
+

This co-lemma can be understood as follows: UpPosLemma invokes itself +co-recursively to obtain the proof for Pos(Up(n).tail) (since Up(n).tail +equals Up(n+1)). The proof glue needed to then conclude Pos(Up(n)) is +provided automatically, thanks to the power of the SMT-based verifier. +

18.2.4.2. Prefix Lemmas
+

To understand why the above UpPosLemma co-lemma code is a sound proof, +let us now describe the details of the desugaring of co-lemmas. In +analogy to how a copredicate declaration defines both a co-predicate and +a prefix predicate, a colemma declaration defines both a co-lemma and a +prefix lemma. In the call graph, the cluster containing a co-lemma must +contain only co-lemmas and prefix lemmas, no other methods or functions. +By decree, a co-lemma and its corresponding prefix lemma are always +placed in the same cluster. Both co-lemmas and prefix lemmas are always +ghosts. +

+

The prefix lemma is constructed from the co-lemma by +

+
    +
  • +

    adding a parameter _k of type nat to denote the prefix length, +

  • +
  • +

    replacing in the co-lemma’s postcondition the positive co-friendly +occurrences of co-predicates by corresponding prefix predicates, +passing in _k as the prefix-length argument, +

  • +
  • +

    prepending _k to the (typically implicit) decreases clause of the co-lemma, +

  • +
  • +

    replacing in the body of the co-lemma every intra-cluster call +M(args) to a colemma by a call M#[_k - 1](args) to the +corresponding prefix lemma, and then +

  • +
  • +

    making the body’s execution conditional on _k != 0. +

+ +

Note that this rewriting removes all co-recursive calls of co-lemmas, +replacing them with recursive calls to prefix lemmas. These recursive +calls are, as usual, checked to be terminating. We allow the pre-declared +identifier _k to appear in the original body of the +co-lemma.9 +

+

We can now think of the body of the co-lemma as being replaced by a +forall call, for every k , to the prefix lemma. By construction, +this new body will establish the colemma’s declared postcondition (on +account of the D axiom, and remembering that only the positive +co-friendly occurrences of co-predicates in the co-lemma’s postcondition +are rewritten), so there is no reason for the program verifier to check +it. +

+

The actual desugaring of our co-lemma UpPosLemma is in fact the +previous code for the UpPosLemma lemma except that UpPosLemmaK is +named UpPosLemma# and modulo a minor syntactic difference in how the +k argument is passed. +

+

In the recursive call of the prefix lemma, there is a proof obligation +that the prefix-length argument _k - 1 is a natural number. +Conveniently, this follows from the fact that the body has been wrapped +in an if _k != 0 statement. This also means that the postcondition must +hold trivially when _k = 0, or else a postcondition violation will be +reported. This is an appropriate design for our desugaring, because +co-lemmas are expected to be used to establish co-predicates, whose +corresponding prefix predicates hold trivially when _k = 0. (To prove +other predicates, use an ordinary lemma, not a co-lemma.) +

+

It is interesting to compare the intuitive understanding of the +co-inductive proof in using a co-lemma with the inductive proof in using +the lemma. Whereas the inductive proof is performing proofs for deeper +and deeper equalities, the co-lemma can be understood as producing the +infinite proof on demand. +

19. Newtypes

+
NewtypeDecl = "newtype" { Attribute } NewtypeName "="  
+  ( NumericTypeName [ ":" Type ] "|" Expression(allowLemma: false, allowLambda: true)
+  | Type               
+  ) 
+

A new numeric type can be declared with the newtype +declaration10, for example: +

+
newtype N = x: M | Q
+

where M is a numeric type and Q is a boolean expression that can +use x as a free variable. If M is an integer-based numeric type, +then so is N; if M is real-based, then so is N. If the type M +can be inferred from Q, the “: M” can be omitted. If Q is just +true, then the declaration can be given simply as: +

+
newtype N = M
+

Type M is known as the base type of N. +

+

A newtype is a numeric type that supports the same operations as its +base type. The newtype is distinct from and incompatible with other +numeric types; in particular, it is not assignable to its base type +without an explicit conversion. An important difference between the +operations on a newtype and the operations on its base type is that +the newtype operations are defined only if the result satisfies the +predicate Q, and likewise for the literals of the +newtype.11 +

+

For example, suppose lo and hi are integer-based numerics that +satisfy 0 <= lo <= hi and consider the following code fragment: +

+
var mid := (lo + hi) / 2;
+

If lo and hi have type int, then the code fragment is legal; in +particular, it never overflows, since int has no upper bound. In +contrast, if lo and hi are variables of a newtype int32 declared +as follows: +

+
newtype int32 = x | -0x80000000 <= x < 0x80000000
+

then the code fragment is erroneous, since the result of the addition +may fail to satisfy the predicate in the definition of int32. The +code fragment can be rewritten as +

+
var mid := lo + (hi - lo) / 2;
+

in which case it is legal for both int and int32. +

+

Since a newtype is incompatible with its base type and since all +results of the newtype's operations are members of the newtype, a +compiler for Dafny is free to specialize the run-time representation +of the newtype. For example, by scrutinizing the definition of +int32 above, a compiler may decide to store int32 values using +signed 32-bit integers in the target hardware. +

+

Note that the bound variable x in Q has type M, not N. +Consequently, it may not be possible to state Q about the N +value. For example, consider the following type of 8-bit 2's +complement integers: +

+
newtype int8 = x: int | -128 <= x < 128
+

and consider a variable c of type int8. The expression +

+
-128 <= c < 128
+

is not well-defined, because the comparisons require each operand to +have type int8, which means the literal 128 is checked to be of +type int8, which it is not. A proper way to write this expression +would be to use a conversion operation, described next, on c to +convert it to the base type: +

+
-128 <= int(c) < 128
+

If possible Dafny will represent values of the newtype using +a native data type for the sake of efficiency. This action can +be inhibited or a specific native data type selected by +using the (:nativeType) attribute, as explained in +section 24.1.11. +

+

There is a restriction that the value 0 must be part of every +newtype.12 +

19.0. Numeric conversion operations

+

For every numeric type N, there is a conversion function with the +same name. It is a partial identity function. It is defined when the +given value, which can be of any numeric type, is a member of the type +converted to. When the conversion is from a real-based numeric type +to an integer-based numeric type, the operation requires that the +real-based argument has no fractional part. (To round a real-based +numeric value down to the nearest integer, use the .Trunc member, +see Section 6.1.) +

+

To illustrate using the example from above, if lo and hi have type +int32, then the code fragment can legally be written as follows: +

+
var mid := (int(lo) + int(hi)) / 2;
+

where the type of mid is inferred to be int. Since the result +value of the division is a member of type int32, one can introduce +yet another conversion operation to make the type of mid be int32: +

+
var mid := int32((int(lo) + int(hi)) / 2);
+

If the compiler does specialize the run-time representation for +int32, then these statements come at the expense of two, +respectively three, run-time conversions. +

20. Subset types

+
NatType_ = "nat" 
+

A subset type is a restricted use of an existing type, called +the base type of the subset type. A subset type is like a +combined use of the base type and a predicate on the base +type. +

+

An assignment from a subset type to its base type is always +allowed. An assignment in the other direction, from the base type to +a subset type, is allowed provided the value assigned does indeed +satisfy the predicate of the subset type. +(Note, in contrast, assignments between a newtype and its base type +are never allowed, even if the value assigned is a value of the target +type. For such assignments, an explicit conversion must be used, see +Section 19.0.) +

+

Dafny supports one subset type, namely the built-in type nat, +whose base type is int.13 Type nat +designates the non-negative subrange of int. A simple example that +puts subset type nat to good use is the standard Fibonacci +function: +

+
function Fib(n: nat): nat
+{
+  if n < 2 then n else Fib(n-2) + Fib(n-1)
+}
+

An equivalent, but clumsy, formulation of this function (modulo the +wording of any error messages produced at call sites) would be to use +type int and to write the restricting predicate in pre- and +postconditions: +

+
function Fib(n: int): int
+  requires 0 <= n;  // the function argument must be non-negative
+  ensures 0 <= Fib(n);  // the function result is non-negative
+{
+  if n < 2 then n else Fib(n-2) + Fib(n-1)
+}
+

Type inference will never infer the type of a variable to be a +subset type. It will instead infer the type to be the base type +of the subset type. For example, the type of x in +

+
forall x :: P(x)
+

will be int, even if predicate P declares its argument to have +type nat. +

21. Statements

+
Stmt = ( BlockStmt | AssertStmt | AssumeStmt | PrintStmt | UpdateStmt
+  | VarDeclStatement | IfStmt | WhileStmt | MatchStmt | ForallStmt
+  | CalcStmt | ModifyStmt | LabeledStmt_ | BreakStmt_ | ReturnStmt
+  | YieldStmt | SkeletonStmt
+  ) 
+

Many of Dafny's statements are similar to those in traditional +programming languages, but a number of them are significantly different. +This grammar production shows the different kinds of Dafny statements. +They are described in subsequent sections. +

21.0. Labeled Statement

+
LabeledStmt_ = "label" LabelName ":" Stmt 
+

A labeled statement is just the keyword label followed by an +identifier, which is the label, followed by a colon and a +statement. The label may be referenced in a break statement +to transfer control to the location after that statement. +

21.1. Break Statement

+
BreakStmt_ = "break" ( LabelName | { "break" } ) ";" 
+

A break statement breaks out of one or more loops (if the +statement consists solely of one or more break keywords), +or else transfers control to just past the statement +bearing the referenced label, if a label was used. +

21.2. Block Statement

+
BlockStmt = "{" { Stmt } "}" 
+

A block statement is just a sequence of statements enclosed by curly braces. +

21.3. Return Statement

+
ReturnStmt = "return" [ Rhs { "," Rhs } ] ";" 
+

A return statement can only be used in a method. It is used +to terminate the execution of the method. +To return a value from a method, the value is assigned to one +of the named return values sometime before a return statement. +In fact, the return values act very much like local variables, +and can be assigned to more than once. Return statements are +used when one wants to return before reaching the end of the +body block of the method. Return statements can be just the +return keyword (where the current value of the out parameters +are used), or they can take a list of values to return. +If a list is given the number of values given must be the +same as the number of named return values. +

21.4. Yield Statement

+
YieldStmt = "yield" [ Rhs { "," Rhs } ] ";" 
+

A yield statement can only be used in an iterator. +See section Iterator types for more details +about iterators. +

+

The body of an iterator is a co-routine. It is used +to yield control to its caller, signaling that a new +set of values for the iterator's yield parameters (if any) +are available. Values are assigned to the yield parameters +at or before a yield statement. +In fact, the yield parameters act very much like local variables, +and can be assigned to more than once. Yield statements are +used when one wants to return new yield parameter values +to the caller. Yield statements can be just the +yield keyword (where the current value of the yield parameters +are used), or they can take a list of values to yield. +If a list is given the number of values given must be the +same as the number of named yield parameters. +

21.5. Update Statement

+
UpdateStmt = Lhs { "," Lhs } 
+    ( ":=" Rhs { "," Rhs }
+    | ":|" [ "assume" ] Expression(allowLemma: false, allowLambda: true)
+    )
+    ";"
+

The update statement has two forms. The first more normal form +allows for parallel assignment of right-hand-side values to the +left-hand side. For example x,y := y,x to swap the values +of x and y. Of course the common case will have only one +rhs and one lhs. +

+

The form that uses “:|” assigns some values to the left-hand-side +variables such that the boolean expression on the right hand side +is satisfied. This can be used to make a choice as in the +following example where we choose an element in a set. +

+
function PickOne<T>(s: set<T>): T
+  requires s != {}
+{
+  var x :| x in s; x
+}
+

Dafny will report an error if it cannot prove that values +exist which satisfy the condition. +

+

In addition, though the choice is arbitrary, given identical +circumstances the choice will be made consistently. +

+

In the actual grammar two additional forms are recognized for +purposes of error detection. The form: +

+
Lhs { Attribute} ;
+

is assumed to be a mal-formed call. +

+

The form +

+
Lhs ":"
+

is diagnosed as a label in which the user forgot the label keyword. +

21.6. Variable Declaration Statement

+
VarDeclStatement = [ "ghost" ] "var" { Attribute }
+  (
+    LocalIdentTypeOptional { "," { Attribute } LocalIdentTypeOptional }
+    [ ":=" Rhs { "," Rhs }
+    | { Attribute } ":|" [ "assume" ] Expression(allowLemma: false, allowLambda: true)
+    ]
+  |
+    "(" CasePattern { "," CasePattern } ")"
+    ":=" Expression(allowLemma: false, allowLambda: true)
+  )
+  ";"
+

A VarDeclStatement is used to declare one or more local variables in a method or function. +The type of each local variable must be given unless the variable is given an initial +value in which case the type will be inferred. If initial values are given, the number of +values must match the number of variables declared. +

+

Note that the type of each variable must be given individually. The following code +

+
var x, y : int;
+

does not declare both x and y to be of type int. Rather it will give an +error explaining that the type of x is underspecified. +

+

The lefthand side can also contain a tuple of patterns which will be +matched against the right-hand-side. For example: +

+
function returnsTuple() : (int, int)
+{
+    (5, 10)
+}
+
+function usesTuple() : int
+{
+    var (x, y) := returnsTuple();
+    x + y
+}

21.7. Guards

+
Guard = ( "*" | "(" "*" ")" | Expression(allowLemma: true, allowLambda: true) ) 
+

Guards are used in if and while statements as boolean expressions. Guards +take two forms. +

+

The first and most common form is just a boolean expression. +

+

The second form is either * or (*). These have the same meaning. An +unspecified boolean value is returned. The value returned +may be different each time it is executed. +

21.8. Binding Guards

+
BindingGuard(allowLambda) =
+  IdentTypeOptional { "," IdentTypeOptional } { Attribute }
+  ":|" Expression(allowLemma: true, allowLambda)
+

A BindingGuard is used as a condition in an IfStmt. +It binds the identifiers declared in the IdentTypeOptionals. +If there exists one or more assignments of values to the bound identifiers +for which Expression is true, then the BindingGuard +returns true and the identifiers are bound to values that make the +Expression true. +

+

The identifiers bound by BindingGuard are ghost variables +and cannot be assigned to non-ghost variables. They are only +used in specification contexts. +

+

Here is an example: +

+
predicate P(n: int)
+{
+  n % 2 == 0
+}
+
+method M1() returns (ghost y: int)
+    requires exists x :: P(x)
+    ensures P(y)
+{
+  if x : int :| P(x) {
+      y := x;
+  }
+}

21.9. If Statement

+
IfStmt = "if"
+  ( IfAlternativeBlock
+  | 
+    ( BindingGuard(allowLambda: true)
+    | Guard 
+    | "…" 
+    ) 
+    BlockStmt [ "else" ( IfStmt | BlockStmt ) ]
+  ) 
+

In the simplest form an if statement uses a guard that is a boolean +expression. It then has the same form as in C# and other common +programming languages. For example +

+
  if x < 0 {
+    x := -x;
+  } 
+

If the guard is an asterisk then a non-deterministic choice is made: +

+
  if * {
+    print "True";
+  } else {
+    print "False";
+  }
+
IfAlternativeBlock =
+   "{" { "case" 
+      (
+        BindingGuard(allowLambda:false)
+      | Expression(allowLemma: true, allowLambda: false)
+      ) "=>" { Stmt } } "}" .
+

The if statement using the IfAlternativeBlock form is similar to the +if ... fi construct used in the book “A Discipline of Programming” by +Edsger W. Dijkstra. It is used for a multi-branch if. +

+

For example: +

+
  if {
+    case x <= y => max := y;
+    case y <= x => max := x;
+  }
+

In this form the expressions following the case keyword are called +guards. The statement is evaluated by evaluating the guards in an +undetermined order until one is found that is true or else all have +evaluated to false. If none of them evaluate to true then the if +statement does nothing. Otherwise the statements to the right of => +for the guard that evaluated to true are executed. +

21.10. While Statement

+
WhileStmt = "while"
+  ( LoopSpecWhile WhileAlternativeBlock
+  | ( Guard | "…" ) LoopSpec
+      ( BlockStmt
+      | "…"
+      | /* go body-less */
+      )
+  ) 
+
WhileAlternativeBlock =
+   "{" { "case" Expression(allowLemma: true, allowLambda: false) "=>" { Stmt } } "}" .
+

See section 4.5 for a description of LoopSpec. +

+

The while statement is Dafny's only loop statement. It has two general +forms. +

+

The first form is similar to a while loop in a C-like language. For +example: +

+
  var i := 0;
+  while i < 5 {
+    i := i + 1;
+  }
+

In this form the condition following the while is one of these: +

+
    +
  • A boolean expression. If true it means execute one more +iteration of the loop. If false then terminate the loop. +
  • +
  • An asterisk (*), meaning non-deterministically yield either +true or false as the value of the condition +
  • +
  • An ellipsis (...), which makes the while statement a skeleton +while statement. TODO: What does that mean? +
+ +

The body of the loop is usually a block statement, but it can also +be a skeleton, denoted by ellipsis, or missing altogether. +TODO: Wouldn't a missing body cause problems? Isn't it clearer to have +a block statement with no statements inside? +

+

The second form uses the WhileAlternativeBlock. It is similar to the +do ... od construct used in the book “A Discipline of Programming” by +Edsger W. Dijkstra. For example: +

+
  while
+    decreases if 0 <= r then r else -r;
+  {
+    case r < 0 =>
+      r := r + 1;
+    case 0 < r =>
+      r := r - 1;
+  }
+

For this form the guards are evaluated in some undetermined order +until one is found that is true, in which case the corresponding statements +are executed. If none of the guards evaluates to true then the +loop execution is terminated. +

21.10.0. Loop Specifications

+

For some simple loops such as those mentioned previously Dafny can figure +out what the loop is doing without more help. However in general the user +must provide more information in order to help Dafny prove the effect of +the loop. This information is provided by a LoopSpec. A +LoopSpec provides information about invariants, termination, and +what the loop modifies. LoopSpecs are explained in +section 4.5. However the following sections +present additional rationale and tutorial on loop specifications. +

21.10.0.0. Loop Invariants
+

While loops present a problem for Dafny. There is no way for Dafny to +know in advance how many times the code will go around the loop. But +Dafny needs to consider all paths through a program, which could include +going around the loop any number of times. To make it possible for Dafny +to work with loops, you need to provide loop invariants, another kind of +annotation. +

+

A loop invariant is an expression that holds upon entering a loop, and +after every execution of the loop body. It captures something that is +invariant, i.e. does not change, about every step of the loop. Now, +obviously we are going to want to change variables, etc. each time around +the loop, or we wouldn't need the loop. Like pre- and postconditions, an +invariant is a property that is preserved for each execution of the loop, +expressed using the same boolean expressions we have seen. For example, +

+
var i := 0;
+while i < n
+  invariant 0 <= i
+{
+  i := i + 1;
+}
+

When you specify an invariant, Dafny proves two things: the invariant +holds upon entering the loop, and it is preserved by the loop. By +preserved, we mean that assuming that the invariant holds at the +beginning of the loop, we must show that executing the loop body once +makes the invariant hold again. Dafny can only know upon analyzing the +loop body what the invariants say, in addition to the loop guard (the +loop condition). Just as Dafny will not discover properties of a method +on its own, it will not know any but the most basic properties of a loop +are preserved unless it is told via an invariant. +

21.10.0.1. Loop Termination
+

Dafny proves that code terminates, i.e. does not loop forever, by using +decreases annotations. For many things, Dafny is able to guess the right +annotations, but sometimes it needs to be made explicit. In fact, for all +of the code we have seen so far, Dafny has been able to do this proof on +its own, which is why we haven't seen the decreases annotation explicitly +yet. There are two places Dafny proves termination: loops and recursion. +Both of these situations require either an explicit annotation or a +correct guess by Dafny. +

+

A decreases annotation, as its name suggests, gives Dafny an expression +that decreases with every loop iteration or recursive call. There are two +conditions that Dafny needs to verify when using a decreases expression: +

+
    +
  • that the expression actually gets smaller, and +
  • +
  • that it is bounded. +
+ +

Many times, an integral value (natural or plain integer) is the quantity +that decreases, but other things that can be used as well. In the case of +integers, the bound is assumed to be zero. For example, the following is +a proper use of decreases on a loop (with its own keyword, of course): +

+
  while 0 < i
+    invariant 0 <= i
+    decreases i
+  {
+    i := i - 1;
+  }
+

Here Dafny has all the ingredients it needs to prove termination. The +variable i gets smaller each loop iteration, and is bounded below by +zero. This is fine, except the loop is backwards from most loops, which +tend to count up instead of down. In this case, what decreases is not the +counter itself, but rather the distance between the counter and the upper +bound. A simple trick for dealing with this situation is given below: +

+
  while i < n
+    invariant 0 <= i <= n
+    decreases n - i
+  {
+    i := i + 1;
+  }
+

This is actually Dafny's guess for this situation, as it sees i < n and +assumes that n - i is the quantity that decreases. The upper bound of the +loop invariant implies that 0 <= n - i, and gives Dafny a lower bound on +the quantity. This also works when the bound n is not constant, such as +in the binary search algorithm, where two quantities approach each other, +and neither is fixed. +

+

If the decreases clause of a loop specifies “*”, then no +termination check will be performed. Use of this feature is sound only with +respect to partial correctness. +

21.10.0.2. Loop Framing
+

In some cases we also must specify what memory locations the loop body +is allowed to modify. This is done using a modifies clause. +See the discussion of framing in methods for a fuller discussion. +

21.11. Match Statement

+
MatchStmt = "match" Expression(allowLemma: true, allowLambda: true)
+  ( "{" { CaseStatement  } "}"
+  | { CaseStatement }
+  ) 
+
+CaseStatement = CaseBinding_ "=>" { Stmt } 
+

The match statement is used to do case analysis on a value of inductive +or co-inductive type. The form with no leading Ident is for matching +tuples. The expression after the match keyword is the (co)inductive +value being matched. The expression is evaluated and then matched against +each of the case clauses. +

+

There must be a case clause for each constructor of the data type. +The identifier after the case keyword in a case clause, if present, +must be the name of one of the data type's constructors. +If the constructor takes parameters then a parenthesis-enclosed +list of identifiers (with optional type) must follow the +constructor. There must be as many identifiers as the constructor +has parameters. If the optional type is given it must be the same +as the type of the corresponding parameter of the constructor. +If no type is given then the type of the corresponding parameter +is the type assigned to the identifier. +

+

When an inductive value that was created using constructor +expression C1(v1, v2) is matched against a case clause +C2(x1, x2), there is a match provided that C1 and C2 are the +same constructor. In that case x1 is bound to value v1 and +x2 is bound to v2. The identifiers in the case pattern +are not mutable. Here is an example of the use of a match statement. +

+
datatype Tree = Empty | Node(left: Tree, data: int, right: Tree)
+
+// Return the sum of the data in a tree.
+method Sum(x: Tree) returns (r: int)
+{
+  match x {
+    case Empty => r := 0;
+    case Node(t1 : Tree, d, t2) => {
+      var v1 := Sum(t1);
+      var v2 := Sum(t2);
+      r := v1 + d + v2;
+    }
+ }
+}
+

Note that the Sum method is recursive yet has no decreases annotation. +In this case it is not needed because Dafny is able to deduce that +t1 and t2 are smaller (structurally) than x. If Tree had been +coinductive this would not have been possible since x might have been +infinite. +

21.12. Assert Statement

+
AssertStmt = 
+    "assert" { Attribute } 
+    ( Expression(allowLemma: false, allowLambda: true) 
+    | "…" 
+    ) ";" 
+

Assert statements are used to express logical propositions that are +expected to be true. Dafny will attempt to prove that the assertion +is true and give an error if not. Once it has proved the assertion +it can then use its truth to aid in following deductions. +Thus if Dafny is having a difficult time verifying a method +the user may help by inserting assertions that Dafny can prove, +and whose truth may aid in the larger verification effort. +

+

If the proposition is ... then (TODO: what does this mean?). +

21.13. Assume Statement

+
AssumeStmt = 
+    "assume" { Attribute } 
+    ( Expression(allowLemma: false, allowLambda: true) 
+    | "…" 
+    ) ";" 
+

The Assume statement lets the user specify a logical proposition +that Dafny may assume to be true without proof. If in fact the +proposition is not true this may lead to invalid conclusions. +

+

An Assume statement would ordinarily be used as part of a larger +verification effort where verification of some other part of +the program required the proposition. By using the Assume statement +the other verification can proceed. Then when that is completed the +user would come back and replace the assume with assert. +

+

If the proposition is ... then (TODO: what does this mean?). +

21.14. Print Statement

+
PrintStmt = 
+    "print" Expression(allowLemma: false, allowLambda: true) 
+    { "," Expression(allowLemma: false, allowLambda: true) } ";" 
+

The print statement is used to print the values of a comma-separated +list of expressions to the console. The generated C# code uses +the System.Object.ToString() method to convert the values to printable +strings. The expressions may of course include strings that are used +for captions. There is no implicit new line added, so to get a new +line you should include “\n” as part of one of the expressions. +Dafny automatically creates overrides for the ToString() method +for Dafny data types. For example, +

+
datatype Tree = Empty | Node(left: Tree, data: int, right: Tree)
+method Main()
+{
+  var x : Tree := Node(Node(Empty, 1, Empty), 2, Empty);
+  print "x=", x, "\n";
+}
+

produces this output: +

+
x=Tree.Node(Tree.Node(Tree.Empty, 1, Tree.Empty), 2, Tree.Empty)

21.15. Forall Statement

+
ForallStmt = "forall"
+  ( "(" [ QuantifierDomain ] ")"
+  | [ QuantifierDomain ]
+  )
+  { [ "free" ] ForAllEnsuresClause_ }
+  [ BlockStmt ] 
+

The forall statement executes ensures expressions or a body in +parallel for all quantified values in the specified range. +The use of the parallel keyword is deprecated. Use +forall instead. There are several variant uses of the forall +statement. And there are a number of restrictions. +

+

In particular a forall statement can be classified as one of the following: +

+
    +
  • Assign - the forall statement is used for simultaneous assignment. +The target must be an array element or an object field. +
  • +
  • Call - The body consists of a single call to a method without side effects +
  • +
  • Proof - The forall has ensure expressions which are effectively +quantified or proved by the body (if present). +
+ +

An assign forall statement is to perform simultaneous assignment. +The following is an excerpt of an example given by Leino in +Developing Verified Programs with Dafny. +When the buffer holding the queue needs to be resized, +the forall statement is used to simultaneously copy the old contents +into the new buffer. +

+
class {:autocontracts} SimpleQueue<Data>
+{
+  ghost var Contents: seq<Data>;
+  var a: array<Data>; // Buffer holding contents of queue.
+  var m: int;         // Index head of queue.
+  var n: int;         // Index just past end of queue
+  ...
+  method Enqueue(d: Data)
+    ensures Contents == old(Contents) + [d]
+  {
+    if n == a.Length {
+      var b := a;
+      if m == 0 { b := new Data[2 * a.Length]; }
+      forall (i | 0 <= i < n - m) {
+          b[i] := a[m + i];
+      }
+      a, m, n := b, 0, n - m;
+    }
+    a[n], n, Contents := d, n + 1, Contents + [d];
+  }
+}
+

Here is an example of a call forall statement and the +callee. This is contained in the CloudMake-ConsistentBuilds.dfy +test in the Dafny repository. +

+
forall (cmd', deps', e' | Hash(Loc(cmd', deps', e')) == Hash(Loc(cmd, deps, e))) {
+  HashProperty(cmd', deps', e', cmd, deps, e);
+}
+
+ghost method HashProperty(cmd: Expression, deps: Expression, ext: string, 
+    cmd': Expression, deps': Expression, ext': string)
+  requires Hash(Loc(cmd, deps, ext)) == Hash(Loc(cmd', deps', ext'))
+  ensures cmd == cmd' && deps == deps' && ext == ext'
+

From the same file here is an example of a proof forall statement. +

+
forall (p | p in DomSt(stCombinedC.st) && p in DomSt(stExecC.st))
+  ensures GetSt(p, stCombinedC.st) == GetSt(p, stExecC.st)
+{
+  assert DomSt(stCombinedC.st) <= DomSt(stExecC.st);
+  assert stCombinedC.st == Restrict(DomSt(stCombinedC.st), stExecC.st);
+}
+

More generally the statement +

+
forall x | P(x) { Lemma(x); }
+

is used to invoke Lemma(x) on all x for which P(x) holds. If +Lemma(x) ensures Q(x), then the forall statement establishes +

+
forall x :: P(x) ==> Q(x).
+

The forall statement is also used extensively in the desugared forms of +co-predicates and co-lemmas. See section 18.2. +

+

TODO: List all of the restrictions on the forall statement. +

21.16. Modify Statement

+
ModifyStmt = 
+  "modify" { Attribute } 
+  ( FrameExpression(allowLemma: false, allowLambda: true) 
+    { "," FrameExpression(allowLemma: false, allowLambda: true) } 
+  | "…" 
+  ) 
+  ( BlockStmt | ";" ) 
+

The modify statement has two forms which have two different +purposes. +

+

When the modify statement ends with a semi-colon rather than +a block statement its effect is to say that some undetermined +modifications have been made to any or all of the memory +locations specified by the frame expressions. +In the following example, a value is assigned to field x +followed by a modify statement that may modify any field +in the object. After that we can no longer prove that the field +x still has the value we assigned to it. +

+
class MyClass {
+  var x: int;
+  method N()
+    modifies this
+  {
+    x := 18;
+    modify this;
+    assert x == 18;  // error: cannot conclude this here
+  }
+}
+

When the modify statement is followed by a block statement +we are instead specifying what can be modified in that +block statement. Namely, only memory locations specified +by the frame expressions of the block modify statement +may be modified. Consider the following example. +

+
class ModifyBody {
+  var x: int;
+  var y: int;
+  method M0()
+    modifies this
+  {
+    modify {} {
+      x := 3;  // error: violates modifies clause of the modify statement
+    }
+  }
+  method M1()
+    modifies this
+  {
+    modify {} {
+      var o := new ModifyBody;
+      o.x := 3;  // fine
+    }
+  }
+  method M2()
+    modifies this
+  {
+    modify this {
+      x := 3;
+    }
+  }
+}
+

The first modify statement in the example has an empty +frame expression so it cannot modify any memory locations. +So an error is reported when it tries to modify field x. +

+

The second modify statement also has an empty frame +expression. But it allocates a new object and modifies it. +Thus we see that the frame expressions on a block modify +statement only limits what may be modified of existing +memory. It does not limit what may be modified in +new memory that is allocated. +

+

The third modify statement has a frame expression that +allows it to modify any of the fields of the current object, +so the modification of field x is allowed. +

21.17. Calc Statement

+
CalcStmt = "calc" { Attribute } [ CalcOp ] "{" CalcBody "}" 
+CalcBody = { CalcLine [ CalcOp ] Hints } 
+CalcLine = Expression(allowLemma: false, allowLambda: true) ";" 
+Hints = { ( BlockStmt | CalcStmt ) } 
+CalcOp =
+  ( "==" [ "#" "[" Expression(allowLemma: true, allowLambda: true) "]" ]
+  | "<" | ">" 
+  | "!=" | "<=" | ">="
+  | "<==>" | "==>" | "<=="
+  ) 
+

The calc statement supports calculational proofs using a language feature called program-oriented calculations (poC). This feature was introduced and explained in the Verified Calculations paper by +Leino and Polikarpova[22]. Please see that paper for a more complete explanation +of the calc statement. We here mention only the highlights. +

+

Calculational proofs are proofs by stepwise formula manipulation +as is taught in elementary algebra. The typical example is to prove +an equality by starting with a left-hand-side, and through a series of +transformations morph it into the desired right-hand-side. +

+

Non-syntactic rules further restrict hints to only ghost and side-effect +free statements, as well as impose a constraint that only +chain-compatible operators can be used together in a calculation. The +notion of chain-compatibility is quite intuitive for the operators +supported by poC; for example, it is clear that “<” and “>” cannot be used within +the same calculation, as there would be no relation to conclude between +the first and the last line. See the paper for +a more formal treatment of chain-compatibility. +

+

Note that we allow a single occurrence of the intransitive operator “!=” to +appear in a chain of equalities (that is, “!=” is chain-compatible with +equality but not with any other operator, including itself). Calculations +with fewer than two lines are allowed, but have no effect. If a step +operator is omitted, it defaults to the calculation-wide operator, +defined after the calc keyword. If that operator is omitted, it defaults +to equality. +

+

Here is an example using calc statements to prove an elementary +algebraic identity. As it turns out Dafny is able to prove this without +the calc statements, but it helps to illustrate the syntax. +

+
lemma docalc(x : int, y: int)
+  ensures (x + y) * (x + y) == x * x + 2 * x * y + y * y
+{
+  calc {
+    (x + y) * (x + y); ==
+      // distributive law: (a + b) * c == a * c + b * c
+      x * (x + y) + y * (x + y); ==
+      // distributive law: a * (b + c) == a * b + a * c
+      x * x + x * y + y * x + y * y; ==
+      calc {
+        y * x; ==
+        x * y;
+    }
+      x * x + x * y + x * y + y * y; ==
+      calc {
+        x * y + x * y; ==
+        // a = 1 * a
+        1 * x * y + 1 * x * y; ==
+        // Distributive law
+        (1 + 1) * x * y; ==
+        2 * x * y;
+      }
+      x * x + 2 * x * y + y * y;
+  }
+}
+

Here we started with (x + y) * (x + y) as the left-hand-side +expressions and gradually transformed it using distributive, +commutative and other laws into the desired right-hand-side. +

+

The justification for the steps are given as comments, or as +nested calc statements that prove equality of some sub-parts +of the expression. +

+

The == to the right of the semicolons show the relation between +that expression and the next. Because of the transitivity of +equality we can then conclude that the original left-hand-side is +equal to the final expression. +

+

We can avoid having to supply the relational operator between +every pair of expressions by giving a default operator between +the calc keyword and the opening brace as shown in this abbreviated +version of the above calc statement: +

+
calc == {
+  (x + y) * (x + y);
+  x * (x + y) + y * (x + y);
+  x * x + x * y + y * x + y * y;
+  x * x + x * y + x * y + y * y;
+  x * x + 2 * x * y + y * y;
+}
+

And since equality is the default operator we could have omitted +it after the calc keyword. +The purpose of the block statements or the calc statements between +the expressions is to provide hints to aid Dafny in proving that +step. As shown in the example, comments can also be used to aid +the human reader in cases where Dafny can prove the step automatically. + +

21.18. Skeleton Statement

+
SkeletonStmt = 
+  "…" 
+  ["where" Ident {"," Ident } ":=" 
+    Expression(allowLemma: false, allowLambda: true) 
+    {"," Expression(allowLemma: false, allowLambda: true) } 
+  ] ";" 

22. Expressions

+

The grammar of Dafny expressions follows a hierarchy that +reflects the precedence of Dafny operators. The following +table shows the Dafny operators and their precedence +in order of increasing binding power. +

+ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
operator description
; In LemmaCall;Expression
<==>, ⇔ equivalence (if and only if)
==>, ⇒ implication (implies)
<==, ⇐ reverse implication (follows from)
&&, ∧ conjunction (and)
||, ∨ disjunction (or)
!, ¬ negation (not)
== equality
==#[k] prefix equality (co-inductive)
!= disequality
!=#[k] prefix disequality (co-inductive)
< less than
<= at most
>= at least
> greater than
in collection membership
!in collection non-membership
!! disjointness
+ addition (plus)
- subtraction (minus)
* multiplication (times)
/ division (divided by)
% modulus (mod)
- arithmetic negation (unary minus)
!, ¬ logical negation
Primary Expressions
+

We are calling the UnaryExpressions that are neither +arithmetic nor logical negation the primary expressions. +They are the most tightly bound. +

+

In the grammar entries below we explain the meaning when the +operator for that precedence level is present. If the +operator is not present then we just descend to the +next precedence level. +

22.0. Top-level expressions

+
Expression(allowLemma, allowLambda) = 
+    EquivExpression(allowLemma, allowLambda)
+    [ ";" Expression(allowLemma, allowLambda) ] 
+

The “allowLemma” argument says whether or not the expression +to be parsed is allowed to have the form S;E where S is a call to a lemma. +“allowLemma” should be passed in as “false” whenever the expression to +be parsed sits in a context that itself is terminated by a semi-colon. +

+

The “allowLambda” says whether or not the expression to be parsed is +allowed to be a lambda expression. More precisely, an identifier or +parenthesized-enclosed comma-delimited list of identifiers is allowed to +continue as a lambda expression (that is, continue with a “reads”, “requires”, +or “=>”) only if “allowLambda” is true. This affects function/method/iterator +specifications, if/while statements with guarded alternatives, and expressions +in the specification of a lambda expression itself. +

+

Sometimes an expression will fail unless some relevant fact is known. +In the following example the F_Fails function fails to verify +because the Fact(n) divisor may be zero. But preceding +the expression by a lemma that ensures that the denominator +is not zero allows function F_Succeeds to succeed. +

+
function Fact(n: nat): nat
+{
+  if n == 0 then 1 else n * Fact(n-1)
+}
+
+lemma L(n: nat)
+  ensures 1 <= Fact(n)
+{
+}
+
+function F_Fails(n: nat): int
+{
+  50 / Fact(n)  // error: possible division by zero
+}
+
+function F_Succeeds(n: nat): int
+{
+  L(n);
+  50 / Fact(n)
+}

22.1. Equivalence Expressions

+
EquivExpression(allowLemma, allowLambda) = 
+  ImpliesExpliesExpression(allowLemma, allowLambda)
+  { "<==>" ImpliesExpliesExpression(allowLemma, allowLambda) } 
+

An EquivExpression that contains one or more “<==>”s is +a boolean expression and all the contained ImpliesExpliesExpression +must also be boolean expressions. In that case each “<==>” +operator tests for logical equality which is the same as +ordinary equality. +

+

See section 6.0.0 for an explanation of the +<==> operator as compared with the == operator. +

22.2. Implies or Explies Expressions

+
ImpliesExpliesExpression(allowLemma, allowLambda) =
+  LogicalExpression(allowLemma, allowLambda)
+  [ (  "==>" ImpliesExpression(allowLemma, allowLambda)
+    | "<==" LogicalExpression(allowLemma, allowLambda)
+            { "<==" LogicalExpression(allowLemma, allowLambda) }
+    )
+  ] 
+
+ImpliesExpression(allowLemma, allowLambda) =
+  LogicalExpression(allowLemma, allowLambda) 
+  [  "==>" ImpliesExpression(allowLemma, allowLambda) ] 
+

See section 6.0.2 for an explanation +of the ==> and <== operators. +

22.3. Logical Expressions

+
LogicalExpression(allowLemma, allowLambda) =
+  RelationalExpression(allowLemma, allowLambda)
+  [ ( "&&" RelationalExpression(allowLemma, allowLambda)
+           { "&&" RelationalExpression(allowLemma, allowLambda) }
+    | "||" RelationalExpression(allowLemma, allowLambda)
+           { "||" RelationalExpression(allowLemma, allowLambda) }
+    )
+  ] 
+

See section 6.0.1 for an explanation +of the && (or ∧) and || (or ∨) operators. +

22.4. Relational Expressions

+
RelationalExpression(allowLemma, allowLambda) =
+  Term(allowLemma, allowLambda)
+  [ RelOp Term(allowLemma, allowLambda)
+          { RelOp Term(allowLemma, allowLambda) } ] 
+
+RelOp =
+  ( "==" [ "#" "[" Expression(allowLemma: true, allowLambda: true) "]" ]
+  | "<" | ">" | "<=" | ">=" 
+  | "!=" [ "#" "[" Expression(allowLemma: true, allowLambda: true) "]" ]
+  | "in"
+  | "!in"
+  | "!!"
+  )
+  
+

The relation expressions that have a RelOp compare two or more terms. +As explained in section 6, ==, !=, <, >, <=, and >= +and their corresponding Unicode equivalents are chaining. +

+

The in and !in operators apply to collection types as explained in +section 9 and represent membership or non-membership +respectively. +

+

The !! represents disjointness for sets and multisets as explained in +sections 9.0 and 9.1. +

+

Note that x ==#[k] y is the prefix equality operator that compares +co-inductive values for equality to a nesting level of k, as +explained in section 18.2.3.0. +

22.5. Terms

+
Term(allowLemma, allowLambda) =
+  Factor(allowLemma, allowLambda)
+  { AddOp Factor(allowLemma, allowLambda) } 
+AddOp = ( "+" | "-" ) 
+

Terms combine Factors by adding or subtracting. +Addition has these meanings for different types: +

+
    +
  • Arithmetic addition for numeric types (section 6.1). +
  • +
  • Union for sets and multisets (sections 9.0 and 9.1) +
  • +
  • Concatenation for sequences (section 9.2) +
+ +

Subtraction is arithmetic subtraction for numeric types, and set or multiset +difference for sets and multisets. +

22.6. Factors

+
Factor(allowLemma, allowLambda) =
+  UnaryExpression(allowLemma, allowLambda)
+  { MulOp UnaryExpression(allowLemma, allowLambda) } 
+MulOp = ( "*" | "/" | "%" ) 
+

A Factor combines UnaryExpressions using multiplication, +division, or modulus. For numeric types these are explained in +section 6.1. +

+

Only * has a non-numeric application. It represents set or multiset +intersection as explained in sections 9.0 and 9.1. +

22.7. Unary Expressions

+
UnaryExpression(allowLemma, allowLambda) =
+  ( "-" UnaryExpression(allowLemma, allowLambda)
+  | "!" UnaryExpression(allowLemma, allowLambda)
+  | PrimaryExpression_(allowLemma, allowLambda)
+  )
+  
+

A UnaryExpression applies either numeric (section 6.1) +or logical (section 6.0) negation to its operand. +

22.8. Primary Expressions

+ + +
PrimaryExpression_(allowLemma, allowLambda) =
+  ( MapDisplayExpr { Suffix }
+  | LambdaExpression(allowLemma)
+  | EndlessExpression(allowLemma, allowLambda)
+  | NameSegment { Suffix }
+  | SeqDisplayExpr { Suffix }
+  | SetDisplayExpr { Suffix }
+  | MultiSetExpr { Suffix }
+  | ConstAtomExpression { Suffix }
+  )
+  
+

After descending through all the binary and unary operators we arrive at +the primary expressions which are explained in subsequent sections. As +can be seen, a number of these can be followed by 0 or more Suffixes +to select a component of the value. +

+

If the allowLambda is false then LambdaExpressions are not +recognized in this context. +

22.9. Lambda expressions

+
LambdaExpression(allowLemma) =
+  ( WildIdent
+  | "(" [ IdentTypeOptional { "," IdentTypeOptional } ] ")"
+  )
+  LambdaSpec_
+  LambdaArrow Expression(allowLemma, allowLambda: true) 
+
+LambdaArrow = ( "=>" | "->" ) 
+

See section 4.3 for a description of LambdaSpec. +

+

In addition to named functions, Dafny supports expressions that define +functions. These are called lambda (expression)s (some languages +know them as anonymous functions). A lambda expression has the +form: +

+
(params) specification => body
+

where params is a comma-delimited list of parameter +declarations, each of which has the form x or x: T. The type T +of a parameter can be omitted when it can be inferred. If the +identifier x is not needed, it can be replaced by “_”. If +params consists of a single parameter x (or _) without an +explicit type, then the parentheses can be dropped; for example, the +function that returns the successor of a given integer can be written +as the following lambda expression: +

+
x => x + 1
+

The specification is a list of clauses requires E or +reads W, where E is a boolean expression and W is a frame +expression. +

+

body is an expression that defines the function's return +value. The body must be well-formed for all possible values of the +parameters that satisfy the precondition (just like the bodies of +named functions and methods). In some cases, this means it is +necessary to write explicit requires and reads clauses. For +example, the lambda expression +

+
x requires x != 0 => 100 / x
+

would not be well-formed if the requires clause were omitted, +because of the possibility of division-by-zero. +

+

In settings where functions cannot be partial and there are no +restrictions on reading the heap, the eta expansion of a function +F: T -> U (that is, the wrapping of F inside a lambda expression +in such a way that the lambda expression is equivalent to F) would +be written x => F(x). In Dafny, eta expansion must also account for +the precondition and reads set of the function, so the eta expansion +of F looks like: +

+
x requires F.requires(x) reads F.reads(x) => F(x)

22.10. Left-Hand-Side Expressions

+
Lhs =
+  ( NameSegment { Suffix }
+  | ConstAtomExpression Suffix { Suffix }
+  ) 
+

A left-hand-side expression is only used on the left hand +side of an UpdateStmt. +

+

TODO: Try to give examples showing how these kinds of +left-hand-sides are possible. +

22.11. Right-Hand-Side Expressions

+
Rhs =
+  ( ArrayAllocation_
+  | ObjectAllocation_
+  | Expression(allowLemma: false, allowLambda: true)
+  | HavocRhs_
+  )
+  { Attribute } 
+

An Rhs is either an array allocation, an object allocation, +an expression, or a havoc right-hand-side, optionally followed +by one or more Attributes. +

+

Right-hand-side expressions appear in the following constructs: +ReturnStmt, YieldStmt, UpdateStmt, or VarDeclStatement. +These are the only contexts in which arrays or objects may be +allocated, or in which havoc may be produced. +

22.12. Array Allocation

+
ArrayAllocation_ = "new" Type "[" Expressions "]" 
+

This allocates a new single or multi-dimensional array as explained in +section 14. +

22.13. Object Allocation

+
ObjectAllocation_ = "new" Type [ "(" [ Expressions ] ")" ] 
+

This allocates a new object of a class type as explained +in section 12. +

22.14. Havoc Right-Hand-Side

+
HavocRhs_ = "*"
+

A havoc right-hand-side produces an arbitrary value of its associated +type. To get a more constrained arbitrary value the “assign-such-that” +operator (:|) can be used. See section 21.5. +

22.15. Constant Or Atomic Expressions

+
ConstAtomExpression =
+  ( LiteralExpression_
+  | FreshExpression_
+  | OldExpression_
+  | CardinalityExpression_
+  | NumericConversionExpression_
+  | ParensExpression
+  ) 
+

A ConstAtomExpression represents either a constant of some type, or an +atomic expression. A ConstAtomExpression is never an l-value. Also, a +ConstAtomExpression is never followed by an open parenthesis (but could +very well have a suffix that starts with a period or a square bracket). +(The “Also…” part may change if expressions in Dafny could yield +functions.) +

22.16. Literal Expressions

+
LiteralExpression_ = 
+ ( "false" | "true" | "null" | Nat | Dec | 
+   charToken | stringToken | "this") 
+

A literal expression is a boolean literal, a null object reference, +an unsigned integer or real literal, a character or string literal, +or “this” which denotes the current object in the context of +an instance method or function. +

22.17. Fresh Expressions

+
FreshExpression_ = "fresh" "(" Expression(allowLemma: true, allowLambda: true) ")" 
+

fresh(e) returns a boolean value that is true if +the objects referenced in expression e were all +freshly allocated in the current method invocation. +The argument of fresh must be either an object reference +or a collection of object references. +

22.18. Old Expressions

+
OldExpression_ = "old" "(" Expression(allowLemma: true, allowLambda: true) ")" 
+

An old expression is used in postconditions. old(e) evaluates to +the value expression e had on entry to the current method. +

22.19. Cardinality Expressions

+
CardinalityExpression_ = "|" Expression(allowLemma: true, allowLambda: true) "|" 
+

For a collection expression c, |c| is the cardinality of c. For a +set or sequence the cardinality is the number of elements. For +a multiset the cardinality is the sum of the multiplicities of the +elements. For a map the cardinality is the cardinality of the +domain of the map. Cardinality is not defined for infinite maps. +For more see section 9. +

22.20. Numeric Conversion Expressions

+
NumericConversionExpression_ = 
+    ( "int" | "real" ) "(" Expression(allowLemma: true, allowLambda: true) ")" 
+

Numeric conversion expressions give the name of the target type +followed by the expression being converted in parentheses. +This production is for int and real as the target types +but this also applies more generally to other numeric types, +e.g. newtypes. See section 19.0. +

22.21. Parenthesized Expression

+
ParensExpression =
+  "(" [ Expressions ] ")" 
+

A ParensExpression is a list of zero or more expressions +enclosed in parentheses. +

+

If there is exactly one expression enclosed then the value is just +the value of that expression. +

+

If there are zero or more than one, the result is a tuple value. +See section 18.1. +

22.22. Sequence Display Expression

+
SeqDisplayExpr = "[" [ Expressions ] "]" 
+

A sequence display expression provides a way of constructing +a sequence with given values. For example +

+
[1, 2, 3]
+

is a sequence with three elements in it. +See section 9.2 for more information on +sequences. +

22.23. Set Display Expression

+
SetDisplayExpr = [ "iset" ] "{" [ Expressions ] "}" 
+

A set display expression provides a way of constructing +a set with given elements. If the keyword iset is present +then a potentially infinite set is constructed. +

+

For example +

+
{1, 2, 3}
+

is a set with three elements in it. +See section 9.0 for more information on +sets. +

22.24. Multiset Display or Cast Expression

+
MultiSetExpr = 
+    "multiset" 
+    ( "{" [ Expressions ] "}" 
+    | "(" Expression(allowLemma: true, allowLambda: true) ")" 
+    ) 
+

A multiset display expression provides a way of constructing +a multiset with given elements and multiplicity. For example +

+
multiset{1, 1, 2, 3}
+

is a multiset with four elements in it (three distinct values). The number 1 has a multiplicity of 2, +the others a multiplicity of 1. +

+

On the other hand, a multiset cast expression converts a set or a sequence +into a multiset as shown here: +

+
var s : set<int> := {1, 2, 3};
+var ms : multiset<int> := multiset(s);
+ms := ms + multiset{1};
+var sq : seq<int> := [1, 1, 2, 3];
+var ms2 : multiset<int> := multiset(sq);
+assert ms == ms2;
+

See section 9.1 for more information on +multisets. +

22.25. Map Display Expression

+
MapDisplayExpr = ("map" | "imap" ) "[" [ MapLiteralExpressions ] "]" 
+MapLiteralExpressions = 
+    Expression(allowLemma: true, allowLambda: true) 
+    ":=" Expression(allowLemma: true, allowLambda: true) 
+    { "," Expression(allowLemma: true, allowLambda: true) 
+          ":=" Expression(allowLemma: true, allowLambda: true) 
+    }
+

A map display expression builds a finite or potentially infinite +map from explicit MapLiteralExpressions. For example: +

+
var m := map[1 := "a", 2 := "b"];
+ghost var im := imap[1 := "a", 2 := "b"];
+

Note that imaps may only appear in ghost contexts. See +section 9.3 for more details on maps and imaps. +

22.26. Endless Expression

+
EndlessExpression(allowLemma, allowLambda) =
+  ( IfExpression_(allowLemma, allowLambda)
+  | MatchExpression(allowLemma, allowLambda)
+  | QuantifierExpression(allowLemma, allowLambda)
+  | SetComprehensionExpr(allowLemma, allowLambda)
+  | StmtInExpr Expression(allowLemma, allowLambda)
+  | LetExpr(allowLemma, allowLambda)
+  | MapComprehensionExpr(allowLemma, allowLambda)
+  ) 
+ + + +

EndlessExpression gets its name from the fact that all its alternate +productions have no terminating symbol to end them, but rather they +all end with an Expression. The various +EndlessExpression alternatives are described below. +

22.27. If Expression

+
IfExpression_(allowLemma, allowLambda) = 
+    "if" Expression(allowLemma: true, allowLambda: true)
+    "then" Expression(allowLemma: true, allowLambda: true) 
+    "else" Expression(allowLemma, allowLambda) 
+

The IfExpression is a conditional expression. It first evaluates +the expression following the if. If it evaluates to true then +it evaluates the expression following the then and that is the +result of the expression. If it evaluates to false then the +expression following the else is evaluated and that is the result +of the expression. It is important that only the selected expression +is evaluated as the following example shows. +

+
var k := 10 / x; // error, may divide by 0.
+var m := if x != 0 then 10 / x else 1; // ok, guarded

22.28. Case Bindings and Patterns

+
CaseBinding_ =
+  "case"
+  ( Ident [ "(" CasePattern { "," CasePattern } ")" ] 
+  | "(" CasePattern { "," CasePattern } ")"
+  )
+
+CasePattern = 
+  ( Ident "(" [ CasePattern { "," CasePattern } ] ")"
+  | "(" [ CasePattern { "," CasePattern } ] ")"
+  | IdentTypeOptional
+  ) 
+

Case bindings and patterns are used for (possibly nested) +pattern matching on inductive or coinductive values. +The CaseBinding_ construct is used in +CaseStatement and CaseExpressions. +Besides its use in CaseBinding_, CasePatterns are used +in LetExprs and VarDeclStatements. +

+

When matching an inductive or coinductive value in +a MatchStmt or MatchExpression, there must be +a CaseBinding_ for each constructor. A tuple is +considered to have a single constructor. +The Ident of the CaseBinding_ must match the name +of a constructor (or in the case of a tuple the Ident is +absent and the second alternative is chosen). +The CasePatterns inside the parenthesis are then +matched against the argument that were given to the +constructor when the value was constructed. +The number of CasePatterns must match the number +of parameters to the constructor (or the arity of the +tuple). +

+

The CasePatterns may be nested. The set of non-constructor-name +identifiers contained in a CaseBinding_ must be distinct. +They are bound to the corresponding values in the value being +matched. +

22.29. Match Expression

+
MatchExpression(allowLemma, allowLambda) = 
+  "match" Expression(allowLemma, allowLambda)
+  ( "{" { CaseExpression(allowLemma: true, allowLambda: true) } "}" 
+  | { CaseExpression(allowLemma, allowLambda) } 
+  ) 
+
+CaseExpression(allowLemma, allowLambda) =
+    CaseBinding_ "=>" Expression(allowLemma, allowLambda) 
+

A MatchExpression is used to conditionally evaluate and select an +expression depending on the value of an algebraic type, i.e. an inductive +type, or a co-inductive type. +

+

The Expression following the match keyword is called the +selector. There must be a CaseExpression for each constructor of +the type of the selector. The Ident following the case keyword in a +CaseExpression is the name of a constructor of the selector's type. +It may be absent if the expression being matched is a tuple since these +have no constructor name. +

+

If the constructor has parameters then in the CaseExpression the +constructor name must be followed by a parenthesized list of CasePatterns. +If the constructor has no parameters then the +CaseExpression must not have a following CasePattern list. +All of the identifiers in the CasePatterns must be distinct. +If types for the identifiers are not given then types are inferred +from the types of the constructor's parameters. If types are +given then they must agree with the types of the +corresponding parameters. +

+

A MatchExpression is evaluated by first evaluating the selector. +Then the CaseClause is selected for the constructor that was +used to construct the evaluated selector. If the constructor had +parameters then the actual values used to construct the selector +value are bound to the identifiers in the identifier list. +The expression to the right of the => in the CaseClause is then +evaluated in the environment enriched by this binding. The result +of that evaluation is the result of the MatchExpression. +

+

Note that the braces enclosing the CaseClauses may be omitted. +

22.30. Quantifier Expression

+
QuantifierExpression(allowLemma, allowLambda) =
+    ( "forall" | "exists" ) QuantifierDomain "::"
+    Expression(allowLemma, allowLambda) 
+
+QuantifierDomain =
+  IdentTypeOptional { "," IdentTypeOptional } { Attribute } 
+  [ "|" Expression(allowLemma: true, allowLambda: true) ]
+

A QuantifierExpression is a boolean expression that specifies that a +given expression (the one following the “::”) is true for all (for +forall) or some (for exists) combination of values of the +quantified variables, namely those in the QuantifierDomain. +

+

Here are some examples: +

+
assert forall x : nat | x <= 5 :: x * x <= 25;
+(forall n :: 2 <= n ==> (exists d :: n < d && d < 2*n))
+

or using the Unicode symbols: +

+
assert ∀ x : nat | x <= 5 • x * x <= 25;
+(∀ n • 2 <= n ==> (∃ d • n < d && d < 2*n))
+

The quantifier identifiers are bound within the scope of the +expressions in the QuantifierExpression. +

+

If types are not given for the quantified identifiers then Dafny +attempts to infer their types from the context of the expressions. +If this is not possible the program is in error. +

22.31. Set Comprehension Expressions

+
SetComprehensionExpr(allowLemma, allowLambda) =
+  [ "set" | "iset" ] 
+  IdentTypeOptional { "," IdentTypeOptional } { Attribute }
+  "|" Expression(allowLemma, allowLambda) 
+  [ "::" Expression(allowLemma, allowLambda) ] 
+

A set comprehension expression is an expressions that yields a set +(possibly infinite if iset is used) that +satisfies specified conditions. There are two basic forms. +

+

If there is only one quantified variable the optional "::" Expression +need not be supplied, in which case it is as if it had been supplied +and the expression consists solely of the quantified variable. +That is, +

+
set x : T | P(x)
+

is equivalent to +

+
set x : T | P(x) :: x
+

For the full form +

+
var S := set x1:T1, x2:T2 ... | P(x1, x2, ...) :: Q(x1, x2, ...)
+

the elements of S will be all values resulting from evaluation of Q(x1, x2, ...) +for all combinations of quantified variables x1, x2, ... such that +predicate P(x1, x2, ...) holds. For example, +

+
var S := set x:nat, y:nat | x < 2 && y < 2 :: (x, y)
+

would yield S == {(0, 0), (0, 1), (1, 0), (1,1) } +

+

The types on the quantified variables are optional and if not given Dafny +will attempt to infer them from the contexts in which they are used in the +P or Q expressions. +

+

If a finite set was specified (“set” keyword used), Dafny must be able to prove that the +result is finite otherwise the set comprehension expression will not be +accepted. +

+

Set comprehensions involving reference types such as +

+
set o: object | true
+

are allowed in ghost contexts. In particular, in ghost contexts, the +check that the result is finite should allow any set comprehension +where the bound variable is of a reference type. In non-ghost contexts, +it is not allowed, because–even though the resulting set would be +finite–it is not pleasant or practical to compute at run time. +

22.32. Statements in an Expression

+
StmtInExpr = ( AssertStmt | AssumeStmt | CalcStmt ) 
+

A StmtInExpr is a kind of statement that is allowed to +precede an expression in order to ensure that the expression +can be evaluated without error. For example: +

+
assume x != 0; 10/x
+

Assert, assume and calc statements can be used in this way. +

22.33. Let Expression

+
LetExpr(allowLemma, allowLambda) =
+    [ "ghost" ] "var" CasePattern { "," CasePattern } 
+    ( ":=" | { Attribute } ":|" )
+    Expression(allowLemma: false, allowLambda: true) 
+    { "," Expression(allowLemma: false, allowLambda: true) } ";" 
+    Expression(allowLemma, allowLambda) 
+

A let expression allows binding of intermediate values to identifiers +for use in an expression. The start of the let expression is +signaled by the var keyword. They look much like a local variable +declaration except the scope of the variable only extends to the +enclosed expression. +

+

For example: +

+
var sum := x + y; sum * sum
+

In the simple case the CasePattern is just an identifier with optional +type (which if missing is inferred from the rhs). +

+

The more complex case allows destructuring of constructor expressions. +For example: +

+
datatype Stuff = SCons(x: int, y: int) | Other
+function GhostF(z: Stuff): int
+  requires z.SCons?
+{
+  var SCons(u, v) := z; var sum := u + v; sum * sum
+}

22.34. Map Comprehension Expression

+
MapComprehensionExpr(allowLemma, allowLambda) = 
+  ( "map" | "imap" ) IdentTypeOptional { Attribute } 
+  [ "|" Expression(allowLemma: true, allowLambda: true) ] 
+  "::" Expression(allowLemma, allowLambda) 
+

A MapComprehensionExpr defines a finite or infinite map value +by defining a domain (using the IdentTypeOptional and the optional +condition following the “|”) and for each value in the domain, +giving the mapped value using the expression following the “::”. +

+

For example: +

+
function square(x : int) : int { x * x }
+method test() 
+{
+  var m := map x : int | 0 <= x <= 10 :: x * x;
+  ghost var im := imap x : int :: x * x;
+  ghost var im2 := imap x : int :: square(x);
+}
+

Dafny maps must be finite, so the domain must be constrained to be finite. +But imaps may be infinite as the example shows. The last example shows +creation of an infinite map that gives the same results as a function. +

+ + +

22.35. Name Segment

+
NameSegment = Ident [ GenericInstantiation | HashCall ] 
+

A NameSegment names a Dafny entity by giving its declared +name optionally followed by information to +make the name more complete. For the simple case it is +just an identifier. +

+

If the identifier is for a generic entity it is followed by +a GenericInstantiation which provides actual types for +the type parameters. +

+

To reference a prefix predicate (see section 18.2.3) or +prefix lemma (see section 18.2.4.2), the identifier +must be the name of the copredicate or colemma and it must be +followed by a HashCall. +

22.36. Hash Call

+
HashCall = "#" [ GenericInstantiation ]
+  "[" Expression(allowLemma: true, allowLambda: true) "]" 
+  "(" [ Expressions ] ")" 
+

A HashCall is used to call the prefix for a copredicate or colemma. +In the non-generic case it just inserts "#[k]" before the call argument +list where k is the number of recursion levels. +

+

In the case where the colemma is generic, the generic type +argument is given before. Here is an example: +

+
codatatype Stream<T> = Nil | Cons(head: int, stuff: T, tail: Stream)
+
+function append(M: Stream, N: Stream): Stream
+{
+  match M
+  case Nil => N
+  case Cons(t, s, M') => Cons(t, s, append(M', N))
+}
+
+function zeros<T>(s : T): Stream<T>
+{
+  Cons(0, s, zeros(s))
+}
+
+function ones<T>(s: T): Stream<T>
+{
+  Cons(1, s, ones(s))
+}
+
+copredicate atmost(a: Stream, b: Stream)
+{
+  match a
+  case Nil => true
+  case Cons(h,s,t) => b.Cons? && h <= b.head && atmost(t, b.tail)
+}
+
+colemma {:induction false} Theorem0<T>(s: T)
+  ensures atmost(zeros(s), ones(s))
+{
+  // the following shows two equivalent ways to getting essentially the
+  // co-inductive hypothesis
+  if (*) {
+    Theorem0#<T>[_k-1](s);
+  } else {
+    Theorem0(s);
+  }
+}
+
+

where the HashCall is "Theorem0#<T>[_k-1](s);". +See sections 18.2.3 and 18.2.4.2. +

22.37. Suffix

+
Suffix =
+  ( AugmentedDotSuffix_
+  | DatatypeUpdateSuffix_
+  | SubsequenceSuffix_
+  | SlicesByLengthSuffix_
+  | SequenceUpdateSuffix_
+  | SelectionSuffix_
+  | ArgumentListSuffix_
+  ) 
+

The Suffix non-terminal describes ways of deriving a new value from +the entity to which the suffix is appended. There are seven kinds +of suffixes which are described below. +

22.37.0. Augmented Dot Suffix

+
AugmentedDotSuffix_ = ". " DotSuffix [ GenericInstantiation | HashCall ] 
+

An augmented dot suffix consists of a simple DotSuffix optionally +followed by either +

+
    +
  • a GenericInstantiation (for the case where the item +selected by the DotSuffix is generic), or +
  • +
  • a HashCall for the case where we want to call a prefix copredicate +or colemma. The result is the result of calling the prefix copredicate +or colemma. +
+

22.37.1. Datatype Update Suffix

+
DatatypeUpdateSuffix_ =
+  "." "(" MemberBindingUpdate { "," MemberBindingUpdate } ")"
+
+MemberBindingUpdate = 
+  ( ident | digits ) ":=" Expression(allowLemma: true, allowLambda: true)
+

A datatype update suffix is used to produce a new datatype value +that is the same as an old datatype value except that the +value corresponding to a given destructor has the specified value. +In a MemberBindingUpdate, the ident or digits is the +name of a destructor (i.e. formal parameter name) for one of the +constructors of the datatype. The expression to the right of the +“:=” is the new value for that formal. +

+

All of the destructors in a DatatypeUpdateSuffix_ must be +for the same constructor, and if they do not cover all of the +destructors for that constructor then the datatype value being +updated must have a value derived from that same constructor. +

+

Here is an example: +

+
module NewSyntax {
+datatype MyDataType = MyConstructor(myint:int, mybool:bool) 
+                    | MyOtherConstructor(otherbool:bool) 
+                    | MyNumericConstructor(42:int)
+
+method test(datum:MyDataType, x:int) 
+    returns (abc:MyDataType, def:MyDataType, ghi:MyDataType, jkl:MyDataType)
+    requires datum.MyConstructor?;
+    ensures abc == datum.(myint := x + 2);
+    ensures def == datum.(otherbool := !datum.mybool);
+    ensures ghi == datum.(myint := 2).(mybool := false);
+    // Resolution error: no non_destructor in MyDataType
+    //ensures jkl == datum.(non_destructor := 5);
+    ensures jkl == datum.(42 := 7);
+{
+    abc := MyConstructor(x + 2, datum.mybool); 
+    abc := datum.(myint := x + 2);
+    def := MyOtherConstructor(!datum.mybool);
+    ghi := MyConstructor(2, false);
+    jkl := datum.(42 := 7);
+
+    assert abc.(myint := abc.myint - 2) == datum.(myint := x);
+}
+}

22.37.2. Subsequence Suffix

+
SubsequenceSuffix_ = 
+  "[" [ Expression(allowLemma: true, allowLambda: true) ]
+      ".." [ Expression(allowLemma: true, allowLambda: true) ]
+  "]" 
+

A subsequence suffix applied to a sequence produces a new sequence whose +elements are taken from a contiguous part of the original sequence. For +example, the expression s[lo..hi], for a sequence s and integer-based +numerics lo and hi satisfying 0 <= lo <= hi <= |s|, yields the subsequence +of s consisting of the elements at indices lo through hi-1. See +section 9.2.3 for details. +

22.37.3. Slices By Length Suffix

+
SlicesByLengthSuffix_ = 
+  "[" Expression(allowLemma: true, allowLambda: true)
+      ":" Expression(allowLemma: true, allowLambda: true)
+      { ":" Expression(allowLemma: true, allowLambda: true) }
+      [ ":" ]
+  "]" 
+

Applying a SlicesByLengthSuffix_ to a sequence produces a +sequence of subsequences of the original sequence. +See section 9.2.3 for details. +

22.37.4. Sequence Update Suffix

+
SequenceUpdateSuffix_ =
+  "[" Expression(allowLemma: true, allowLambda: true) 
+      ":=" Expression(allowLemma: true, allowLambda: true) 
+  "]" 
+

For a sequence s and expressions i and v, the expression +s[i := v] is the same as the sequence s except that at +index i it has value v. +

22.37.5. Selection Suffix

+
SelectionSuffix_ =
+  "[" Expression(allowLemma: true, allowLambda: true) 
+      { "," Expression(allowLemma: true, allowLambda: true) } 
+  "]" 
+

If a SelectionSuffix_ has only one expression in it, it is a +zero-based index that may be used to select a single element of a +sequence or from a single-dimensional array. +

+

If a SelectionSuffix_ has more than one expression in it, then +it is a list of indices to index into a multi-dimensional array. +The rank of the array must be the same as the number of indices. +

22.37.6. Argument List Suffix

+
ArgumentListSuffix_ = "(" [ Expressions ] ")" 
+

An argument list suffix is a parenthesized list of expressions that +are the arguments to pass to a method or function that is being +called. Applying such a suffix causes the method or function +to be called and the result is the result of the call. +

22.38. Expression Lists

+
Expressions = 
+    Expression(allowLemma: true, allowLambda: true) 
+    { "," Expression(allowLemma: true, allowLambda: true) }
+

The Expressions non-terminal represents a list of +one or more expressions separated by a comma. +

23. Module Refinement

+

TODO: Write this section. +

24. Attributes

+
Attribute = "{" ":" AttributeName [ Expressions ] "}" 
+

Dafny allows many of its entities to be annotated with Attributes. +The grammar shows where the attribute annotations may appear. +

+

Here is an example of an attribute from the Dafny test suite: +

+
{:MyAttribute "hello", "hi" + "there", 57}
+

In general an attribute may have any name the user chooses. It may be +followed by a comma separated list of expressions. These expressions will +be resolved and type-checked in the context where the attribute appears. +

24.0. Dafny Attribute Implementation Details

+

In the Dafny implementation the Attributes type holds the name of +the attribute, a list of Expression arguments and a link to the +previous Attributes object for that Dafny entity. So for each +Dafny entity that has attributes we have a list of them. +

+

Dafny stores attributes on the following kinds of entities: +Declaration (base class), ModuleDefinition, Statement, +AssignmentRhs, LocalVariable, LetExpr, ComprehensionExpr, +MaybeFreeExpression, Specification. +

+

TODO: Dafny internals information should go into a separate +document on Dafny internals. +

24.1. Dafny Attributes

+

All entities that Dafny translates to Boogie have their attributes +passed on to Boogie except for the {:axiom} attribute (which +conflicts with Boogie usage) and the {:trigger} attribute which is +instead converted into a Boogie quantifier trigger. See Section 11 of +[16]. +

+

Dafny has special processing for some attributes. For some attributes the +setting is only looked for on the entity of interest. For others we start +at the entity and if the attribute is not there, look up in the hierarchy +(enclosing class and enclosing modules). The latter case is checked by +the ContainsBoolAtAnyLevel method in the Dafny source. The attribute +declaration closest to the entity overrides those further away. +

+

For attributes with a single boolean expression argument, the attribute +with no argument is interpreted as if it were true. +

+

The attributes that are processed specially by Dafny are described in the +following sections. +

24.1.0. assumption

+

This attribute can only be placed on a local ghost bool +variable of a method. Its declaration cannot have a rhs, but it is +allowed to participate as the lhs of exactly one assignment of the +form: b := b && expr;. Such a variable declaration translates in the +Boogie output to a declaration followed by an assume b command. TODO: +What is the motivation for this? +

24.1.1. autoReq boolExpr

+

For a function declaration, if this attribute is set true at the nearest +level, then its requires clause is strengthened sufficiently so that +it may call the functions that it calls. +

+

For the following example +

+
function f(x:int) : bool
+  requires x > 3
+{
+  x > 7
+}
+
+// Should succeed thanks to auto_reqs
+function {:autoReq} g(y:int, b:bool) : bool
+{
+  if b then f(y + 2) else f(2*y)
+}
+

the {:autoReq} attribute causes Dafny to +deduce a requires clause for g as if it had been +declared +

+
function g(y:int, b:bool) : bool
+  requires if b then y + 2 > 3 else 2 * y > 3
+{
+  if b then f(y + 2) else f(2*y)
+}

24.1.2. autocontracts

+

Dynamic frames [9, 17, 32, 33] +are frame expressions that can vary dynamically during +program execution. AutoContracts is an experimental feature that will +fill much of the dynamic-frames boilerplate into a class. +

+

From the user's perspective, what needs to be done is simply: +

+
    +
  • mark the class with {:autocontracts} +
  • +
  • declare a function (or predicate) called Valid() +
+ +

AutoContracts will then: +

+
    +
  • +

    Declare: +

    +
    ghost var Repr: set<object>;
  • +
  • +

    For function/predicate Valid(), insert: +

    +
     reads this, Repr
  • +
  • +

    Into body of Valid(), insert (at the beginning of the body): +

    +
     this in Repr && null !in Repr
  • +
  • +

    and also insert, for every array-valued field A declared in the class: +

    +
     (A != null ==> A in Repr) &&
  • +
  • +

    and for every field F of a class type T where T has a field called Repr, also insert: +

    +
     (F != null ==> F in Repr && F.Repr SUBSET Repr && this !in Repr)
  • +
  • +

    Except, if A or F is declared with {:autocontracts false}, then the implication will not +be added. +

  • +
  • +

    For every constructor, add: +

    +
     modifies this
    + ensures Valid() && fresh(Repr - {this})
  • +
  • +

    At the end of the body of the constructor, add: +

    +
     Repr := {this};
    + if (A != null) { Repr := Repr + {A}; }
    + if (F != null) { Repr := Repr + {F} + F.Repr; }
  • +
  • +

    For every method, add: +

+ +
   requires Valid()
+   modifies Repr
+   ensures Valid() && fresh(Repr - old(Repr))
+
    +
  • At the end of the body of the method, add: + +
     if (A != null) { Repr := Repr + {A}; }
    + if (F != null) { Repr := Repr + {F} + F.Repr; }
+

24.1.3. axiom

+

The {:axiom} attribute may be placed on a function or method. +It means that the post-condition may be assumed to be true +without proof. In that case also the body of the function or +method may be omitted. +

+

The {:axiom} attribute is also used for generated reveal_* +lemmas as shown in Section 24.1.12. +

24.1.4. compile

+

The {:compile} attribute takes a boolean argument. It may be applied to +any top-level declaration. If that argument is false then that declaration +will not be compiled into .Net code. +

24.1.5. decl

+

The {:decl} attribute may be placed on a method declaration. It +inhibits the error message that would otherwise be given when the method has an +ensures clause but no body. +

+

TODO: There are no examples of this in the Dafny tests. What is the motivation +for this? +

24.1.6. fuel

+

The {:fuel} attribute is used to specify how much “fuel” a function should have, +i.e., how many times Z3 is permitted to unfold its definition. The +{:fuel} annotation can be added to the function itself, in which +case it will apply to all uses of that function, or it can be overridden +within the scope of a module, function, method, iterator, calc, forall, +while, assert, or assume. The general format is: +

+
{:fuel functionName,lowFuel,highFuel}
+

When applied as an annotation to the function itself, omit +functionName. If highFuel is omitted, it defaults to lowFuel + 1. +

+

The default fuel setting for recursive functions is 1,2. Setting the +fuel higher, say, to 3,4, will give more unfoldings, which may make +some proofs go through with less programmer assistance (e.g., with +fewer assert statements), but it may also increase verification time, +so use it with care. Setting the fuel to 0,0 is similar to making the +definition opaque, except when used with all literal arguments. +

24.1.7. heapQuantifier

+

The {:heapQuantifier} attribute may be used on a QuantifierExpression. +When it appears in a quantifier expression it is as if a new heap-valued +quantifier variable was added to the quantification. Consider this code +that is one of the invariants of a while loop. +

+
invariant forall u {:heapQuantifier} :: f(u) == u + r
+

The quantifier is translated into the following Boogie: +

+
(forall q$heap#8: Heap, u#5: int ::
+    {:heapQuantifier} 
+    $IsGoodHeap(q$heap#8) && ($Heap == q$heap#8 || $HeapSucc($Heap, q$heap#8))
+       ==> $Unbox(Apply1(TInt, TInt, f#0, q$heap#8, $Box(u#5))): int == u#5 + r#0);
+

What this is saying is that the quantified expression, f(u) == u + r, +which may depend on the heap, is also valid for any good heap that is either the +same as the current heap, or that is derived from it by heap update operations. +

+

TODO: I think this means that the quantified expression is actually independent of the +heap. Is that true? +

24.1.8. imported

+

If a MethodDecl or FunctionDecl has an {:imported} attribute, +then it is allowed to have an empty body even though it has an ensures +clause. Ordinarily a body would be required in order to provide the +proof of the ensures clause (but the {:axiom} attribute also +provides this facility, so the need for {:imported} is not clear.) +A method or function declaration may be given the {:imported} attribute. This suppresses +the error message that would be given if a method or function with an ensures clause +does not have a body. +

+

TODO: When would this be used? An example would be helpful. +

+

TODO: When is this useful or valid? +

24.1.9. induction

+

The {:induction} attribute controls the application of +proof by induction to two contexts. Given a list of +variables on which induction might be applied, the +{:induction} attribute selects a sub-list of those +variables (in the same order) to which to apply induction. +

+

TODO: Would there be any advantage to taking the order +from the attribute, rather than preserving the original +order? That would seem to give the user more control. +

+

The two contexts are: +

+
    +
  • A method, in which case the bound variables are all the +in-parameters of the method. +
  • +
  • A quantifier expression, in which case the bound variables +are the bound variables of the quantifier expression. +
+ +

The form of the {:induction} attribute is one of the following: +

+
    +
  • {:induction} – apply induction to all bound variables +
  • +
  • {:induction false} – suppress induction, that is, don't apply it to any bound variable +
  • +
  • {:induction L} where L is a list consisting entirely of bound variables +– apply induction to the specified bound variables +
  • +
  • {:induction X} where X is anything else – treat the same as +{:induction}, that is, apply induction to all bound variables. For this +usage conventionally X is true. +
+ +

Here is an example of using it on a quantifier expression: +

+
ghost method Fill_J(s: seq<int>)
+  requires forall i :: 1 <= i < |s| ==> s[i-1] <= s[i]
+  ensures forall i,j {:induction j} :: 0 <= i < j < |s| ==> s[i] <= s[j]
+{
+}

24.1.10. layerQuantifier

+

When Dafny is translating a quantified expression, if it has +a {:layerQuantifier} attribute an additional quantifier +variable is added to the quantifier bound variables. +This variable has the predefined LayerType. +A {:layerQuantifier} attribute may be placed on a quantifier expression. +Translation of Dafny into Boogie defines a LayerType which has defined zero and +successor constructors. +

+

The Dafny source has the comment that “if a function is recursive, +then make the reveal lemma quantifier a layerQuantifier.” +And in that case it adds the attribute to the quantifier. +

+

There is no explicit user of the {:layerQuantifier} attribute +in the Dafny tests. So I believe this attribute is only used +internally by Dafny and not externally. +

+

TODO: Need more complete explanation of this attribute. +

24.1.11. nativeType

+

The {:nativeType} attribute may only be used on a NewtypeDecl +where the base type is an integral type. It can take one of the following +forms: +

+
    +
  • {:nativeType} - With no parameters it has no effect and the NewtypeDecl +has its default behavior, which is to choose a native type that can hold any +value satisfying the constraints, if possible; otherwise BigInteger is used. +
  • +
  • {:nativeType true} - Also gives default NewtypeDecl behavior, +but gives an error if base type is not integral. +
  • +
  • {:nativeType false} - Inhibits using a native type. BigInteger is used +for integral types and BigRational for real types. +
  • +
  • {:nativeType "typename"} - This form has a native integral +type name as a string literal. Acceptable values are: “byte”, +“sbyte”, “ushort”, “short”, “uint”, “int”, “ulong” and “long”. +An error is reported if the given data type cannot hold all the +values that satisfy the constraint. +
+

24.1.12. opaque

+

Ordinarily the body of a function is transparent to its users but +sometimes it is useful to hide it. If a function f is given the +{:opaque} attribute then Dafny hides the body of the function, +so that it can only be seen within its recursive clique (if any), +or if the programmer specifically asks to see it via the reveal_f() lemma. +

+

We create a lemma to allow the user to selectively reveal the function's body
+That is, given: +

+
  function {:opaque} foo(x:int, y:int) : int
+    requires 0 <= x < 5
+    requires 0 <= y < 5
+    ensures foo(x, y) < 10
+  { x + y }
+

We produce: +

+
  lemma {:axiom} reveal_foo()
+    ensures forall x:int, y:int {:trigger foo(x,y)} ::
+         0 <= x < 5 && 0 <= y < 5 ==> foo(x,y) == foo_FULL(x,y)
+

where foo_FULL is a copy of foo which does not have its body +hidden. In addition foo_FULL is given the +{:opaque_full} and {:auto_generated} attributes in addition +to the {:opaque} attribute (which it got because it is a copy of foo). +

24.1.13. opaque full

+

The {:opaque_full} attribute is used to mark the full version +of an opaque function. See Section 24.1.12. +

24.1.14. prependAssertToken

+

This is used internally in Dafny as part of module refinement. +It is an attribute on an assert statement. +The Dafny code has the following comment: +

+
// Clone the expression, but among the new assert's attributes, indicate
+// that this assertion is supposed to be translated into a check.  That is,
+// it is not allowed to be just assumed in the translation, despite the fact
+// that the condition is inherited.
+

TODO: Decide if we want to describe this in more detail, or whether +the functionality is already adequately described where +refinement is described. +

24.1.15. tailrecursion

+

This attribute is used on a method declaration. It has a boolean argument. +

+

If specified with a false value it means the user specifically +requested no tail recursion, so none is done. +

+

If specified with a true value, or if not specified +then tail recursive optimization will be attempted subject to +the following conditions: +

+
    +
  • It is an error if the method is a ghost method and tail +recursion was explicitly requested. +
  • +
  • Only direct recursion is supported, not mutually recursive methods. +
  • +
  • If {:tailrecursion true} was specified but the code does not allow it +an error message is given. +
+

24.1.16. timeLimitMultiplier

+

This attribute may be placed on a method or function declaration +and has an integer argument. If {:timeLimitMultiplier X} was +specified, a {:timelimit Y} attribute is passed on to Boogie, +where Y is X times either the default verification time limit +for a function or method, or times the value specified by the +Boogie timelimit command-line option. +

24.1.17. trigger

+

Trigger attributes are used on quantifiers and comprehensions. +They are translated into Boogie triggers. +

24.1.18. typeQuantifier

+

The {:typeQuantifier} must be used on a quantifier if it +quantifies over types. +

24.2. Boogie Attributes

+

Use the Boogie “/attrHelp” option to get the list of attributes +that Boogie recognizes and their meaning. Here is the output at +the time of this writing. Dafny passes attributes that have +been specified on to Boogie. +

+
Boogie: The following attributes are supported by this implementation.
+
+  ---- On top-level declarations ---------------------------------------------
+
+    {:ignore}
+      Ignore the declaration (after checking for duplicate names).
+
+    {:extern}
+      If two top-level declarations introduce the same name (for example, two
+      constants with the same name or two procedures with the same name), then
+      Boogie usually produces an error message.  However, if at least one of
+      the declarations is declared with :extern, one of the declarations is
+      ignored.  If both declarations are :extern, Boogie arbitrarily chooses
+      one of them to keep; otherwise, Boogie ignore the :extern declaration
+      and keeps the other.
+
+    {:checksum <string>}
+      Attach a checksum to be used for verification result caching.
+
+  ---- On implementations and procedures -------------------------------------
+
+     {:inline N}
+       Inline given procedure (can be also used on implementation).
+       N should be a non-negative number and represents the inlining depth.
+       With /inline:assume call is replaced with "assume false" once inlining depth is reached.
+       With /inline:assert call is replaced with "assert false" once inlining depth is reached.
+       With /inline:spec call is left as is once inlining depth is reached.
+       With the above three options, methods with the attribute {:inline N} are not verified.
+       With /inline:none the entire attribute is ignored.
+
+     {:verify false}
+       Skip verification of an implementation.
+
+     {:vcs_max_cost N}
+     {:vcs_max_splits N}
+     {:vcs_max_keep_going_splits N}
+       Per-implementation versions of
+       /vcsMaxCost, /vcsMaxSplits and /vcsMaxKeepGoingSplits.
+
+     {:selective_checking true}
+       Turn all asserts into assumes except for the ones reachable from
+       assumptions marked with the attribute {:start_checking_here}.
+       Thus, "assume {:start_checking_here} something;" becomes an inverse
+       of "assume false;": the first one disables all verification before
+       it, and the second one disables all verification after.
+
+     {:priority N}
+       Assign a positive priority 'N' to an implementation to control the order
+       in which implementations are verified (default: N = 1).
+
+     {:id <string>}
+       Assign a unique ID to an implementation to be used for verification
+       result caching (default: "<impl. name>:0").
+
+     {:timeLimit N}
+       Set the time limit for a given implementation.
+
+  ---- On functions ----------------------------------------------------------
+
+     {:builtin "spec"}
+     {:bvbuiltin "spec"}
+       Rewrite the function to built-in prover function symbol 'fn'.
+
+     {:inline}
+     {:inline true}
+       Expand function according to its definition before going to the prover.
+
+     {:never_pattern true}
+       Terms starting with this function symbol will never be
+       automatically selected as patterns. It does not prevent them
+       from being used inside the triggers, and does not affect explicit
+       trigger annotations. Internally it works by adding {:nopats ...}
+       annotations to quantifiers.
+
+     {:identity}
+     {:identity true}
+       If the function has 1 argument and the use of it has type X->X for
+       some X, then the abstract interpreter will treat the function as an
+       identity function.  Note, the abstract interpreter trusts the
+       attribute--it does not try to verify that the function really is an
+       identity function.
+
+  ---- On variables ----------------------------------------------------------
+
+     {:existential true}
+       Marks a global Boolean variable as existentially quantified. If
+       used in combination with option /contractInfer Boogie will check
+       whether there a Boolean assignment to the existentials
+       that makes all verification conditions valid.  Without option
+       /contractInfer the attribute is ignored.
+
+  ---- On assert statements --------------------------------------------------
+
+     {:subsumption n}
+       Overrides the /subsumption command-line setting for this assertion.
+
+     {:split_here}
+       Verifies code leading to this point and code leading from this point
+       to the next split_here as separate pieces.  May help with timeouts.
+       May also occasionally double-report errors.
+
+  ---- The end ---------------------------------------------------------------
+
+

However a scan of Boogie's sources shows it checks for the +following attributes. +

+
    +
  • {:$} +
  • +
  • {:$renamed$} +
  • +
  • {:InlineAssume} +
  • +
  • {:PossiblyUnreachable} +
  • +
  • {:__dominator_enabled} +
  • +
  • {:__enabled} +
  • +
  • {:a##post##} +
  • +
  • {:absdomain} +
  • +
  • {:ah} +
  • +
  • {:assumption} +
  • +
  • {:assumption_variable_initialization} +
  • +
  • {:atomic} +
  • +
  • {:aux} +
  • +
  • {:both} +
  • +
  • {:bvbuiltin} +
  • +
  • {:candidate} +
  • +
  • {:captureState} +
  • +
  • {:checksum} +
  • +
  • {:constructor} +
  • +
  • {:datatype} +
  • +
  • {:do_not_predicate} +
  • +
  • {:entrypoint} +
  • +
  • {:existential} +
  • +
  • {:exitAssert} +
  • +
  • {:expand} +
  • +
  • {:extern} +
  • +
  • {:hidden} +
  • +
  • {:ignore} +
  • +
  • {:inline} +
  • +
  • {:left} +
  • +
  • {:linear} +
  • +
  • {:linear_in} +
  • +
  • {:linear_out} +
  • +
  • {:msg} +
  • +
  • {:name} +
  • +
  • {:originated_from_invariant} +
  • +
  • {:partition} +
  • +
  • {:positive} +
  • +
  • {:post} +
  • +
  • {:pre} +
  • +
  • {:precondition_previous_snapshot} +
  • +
  • {:qid} +
  • +
  • {:right} +
  • +
  • {:selective_checking} +
  • +
  • {:si_fcall} +
  • +
  • {:si_unique_call} +
  • +
  • {:sourcefile} +
  • +
  • {:sourceline} +
  • +
  • {:split_here} +
  • +
  • {:stage_active} +
  • +
  • {:stage_complete} +
  • +
  • {:staged_houdini_tag} +
  • +
  • {:start_checking_here} +
  • +
  • {:subsumption} +
  • +
  • {:template} +
  • +
  • {:terminates} +
  • +
  • {:upper} +
  • +
  • {:verified_under} +
  • +
  • {:weight} +
  • +
  • {:yields} +
+

25. Dafny User's Guide

25.0. Installing Dafny From Binaries

25.1. Building Dafny from Source

+

The current version of Dafny only works with Visual Studio 2012, +so if you intend to run Dafny from within Visual Studio you must +install Visual Studio 2012. +

+

Dafny performs its verification by translating the Dafny source into +the Boogie intermediate verification language. So Dafny references +data structures defined in the Boogie project. So the first step +is to clone and build Boogie from sources. See +https://github.com/boogie-org/boogie. +

+

Follow these steps. +

+

Let work be a working directory. +

+

Clone Boogie using +

+
cd work
+git clone https://github.com/boogie-org/boogie.git
+

Build Boogie using the directions from the Boogie web site, +which for Windows currently are: +

+
    +
  1. Open Source\Boogie.sln in Visual Studio +
  2. +
  3. Right click the Boogie solution in the Solution Explorer and click Enable NuGet Package Restore. You will probably get a prompt asking to confirm this. Choose Yes. +
  4. +
  5. Click BUILD > Build Solution. +
+ +

Clone Dafny using Mercurial. The Dafny directory must be a sibling +of the Boogie directory in order for it to find the Boogie files it needs. +

+
cd work
+hg clone https://hg.codeplex.com/dafny 
+

Download and install the Visual Studio 2012 SDK from +

+ + +

This is needed to build the Visual Studio Extension that +runs Dafny from within Visual Studio 2012. +

+

Build the command-line Dafny executables. +1. Open dafny\Source\Dafny.sln in Visual Studio +2. Click BUILD > Build Solution. +

+

Build and install the Dafny Visual Studio extensions +

+
    +
  1. Open dafny/Source/DafnyExtension.sln in Visual Studio +
  2. +
  3. Click BUILD > Build Solution. +
  4. +
  5. This builds DafnyLanguageService.vsix and DafnyMenu.vsix +in the dafny/Binaries directory. +
  6. +
  7. Install these by clicking on them from Windows Explorer. When +prompted, only check installing into Visual Studio 2012. +
+

25.2. Using Dafny From Visual Studio

+

To test your installation, you can open Dafny test files +from the dafny/Test subdirectory in Visual Studio 2012. +You will want to use “VIEW/Error List” to ensure that +you see any errors that Dafny detects, and +“VIEW/Output” to see the result of any compilation. +

+

An example of a valid Dafny test is +

+
dafny\Test\vstte2012\Tree.dfy
+

You can choose “Dafny/Compile” to compile the Dafny +program to C#. Doing that for the above test +produces Tree.cs and Tree.dll (since this test does +not have a main program). +

+

The following file: +

+
D:\gh\dafny\Test\dafny0\Array.dfy
+

is an example of a Dafny file with verification errors. +The source will show red squiggles or dots where there +are errors, and the Error List window will describe the +errors. +

25.3. Using Dafny From the Command Line

25.3.0. Dafny Command Line Options

+

The command Dafny.exe /? gives the following description of +options that can be passed to Dafny. +

+
  ---- Dafny options ---------------------------------------------------------
+
+  Multiple .dfy files supplied on the command line are concatenated into one
+  Dafny program.
+
+  /dprelude:<file>
+                choose Dafny prelude file
+  /dprint:<file>
+                print Dafny program after parsing it
+                (use - as <file> to print to console)
+  /printMode:<Everything|NoIncludes|NoGhost>
+                NoIncludes disables printing of {:verify false} methods incorporated via the
+                include mechanism, as well as datatypes and fields included from other files.
+                NoGhost disables printing of functions, ghost methods, and proof statements 
+                in implementation methods.  It also disables anything NoIncludes disables.
+  /rprint:<file>
+                print Dafny program after resolving it
+                (use - as <file> to print to console)
+  /dafnyVerify:<n>
+                0 - stop after typechecking
+                1 - continue on to translation, verification, and compilation
+  /compile:<n>  0 - do not compile Dafny program
+                1 (default) - upon successful verification of the Dafny
+                    program, compile Dafny program to .NET assembly
+                    Program.exe (if the program has a Main method) or
+                    Program.dll (othewise), where Program.dfy is the name
+                    of the last .dfy file on the command line
+                2 - always attempt to compile Dafny program to C# program
+                    out.cs, regardless of verification outcome
+                3 - if there is a Main method and there are no verification
+                    errors, compiles program in memory (i.e., does not write
+                    an output file) and runs it
+  /spillTargetCode:<n>
+                0 (default) - don't write the compiled Dafny program (but
+                    still compile it, if /compile indicates to do so)
+                1 - write the compiled Dafny program as a .cs file
+  /dafnycc      Disable features not supported by DafnyCC
+  /noCheating:<n>
+                0 (default) - allow assume statements and free invariants
+                1 - treat all assumptions as asserts, and drop free.
+  /induction:<n>
+                0 - never do induction, not even when attributes request it
+                1 - only apply induction when attributes request it
+                2 - apply induction as requested (by attributes) and also
+                    for heuristically chosen quantifiers
+                3 (default) - apply induction as requested, and for
+                    heuristically chosen quantifiers and ghost methods
+  /inductionHeuristic:<n>
+                0 - least discriminating induction heuristic (that is, lean
+                    toward applying induction more often)
+                1,2,3,4,5 - levels in between, ordered as follows as far as
+                    how discriminating they are:  0 < 1 < 2 < (3,4) < 5 < 6
+                6 (default) - most discriminating
+  /noIncludes   Ignore include directives
+  /noNLarith    Reduce Z3's knowledge of non-linear arithmetic (*,/,%).
+                Results in more manual work, but also produces more predictable behavior.
+  /autoReqPrint:<file>
+                Print out requirements that were automatically generated by autoReq.
+  /noAutoReq    Ignore autoReq attributes
+  /allowGlobals Allow the implicit class '_default' to contain fields, instance functions,
+                and instance methods.  These class members are declared at the module scope,
+                outside of explicit classes.  This command-line option is provided to simply
+                a transition from the behavior in the language prior to version 1.9.3, from
+                which point onward all functions and methods declared at the module scope are
+                implicitly static and fields declarations are not allowed at the module scope.
+                The reference manual is written assuming this option is not given.
+
+
+  /nologo       suppress printing of version number, copyright message
+  /env:<n>      print command line arguments
+                  0 - never, 1 (default) - during BPL print and prover log,
+                  2 - like 1 and also to standard output
+  /wait         await Enter from keyboard before terminating program
+  /xml:<file>   also produce output in XML format to <file>
+
+  ---- Boogie options --------------------------------------------------------
+
+  Multiple .bpl files supplied on the command line are concatenated into one
+  Boogie program.
+
+  /proc:<p>      : limits which procedures to check
+  /noResolve     : parse only
+  /noTypecheck   : parse and resolve only
+
+  /print:<file>  : print Boogie program after parsing it
+                   (use - as <file> to print to console)
+  /pretty:<n>
+                0 - print each Boogie statement on one line (faster).
+                1 (default) - pretty-print with some line breaks.
+  /printWithUniqueIds : print augmented information that uniquely
+                   identifies variables
+  /printUnstructured : with /print option, desugars all structured statements
+  /printDesugared : with /print option, desugars calls
+
+  /overlookTypeErrors : skip any implementation with resolution or type
+                        checking errors
+
+  /loopUnroll:<n>
+                unroll loops, following up to n back edges (and then some)
+  /soundLoopUnrolling
+                sound loop unrolling
+  /printModel:<n>
+                0 (default) - do not print Z3's error model
+                1 - print Z3's error model
+                2 - print Z3's error model plus reverse mappings
+                4 - print Z3's error model in a more human readable way
+  /printModelToFile:<file>
+                print model to <file> instead of console
+  /mv:<file>    Specify file where to save the model in BVD format
+  /enhancedErrorMessages:<n>
+                0 (default) - no enhanced error messages
+                1 - Z3 error model enhanced error messages
+
+  /printCFG:<prefix> : print control flow graph of each implementation in
+                       Graphviz format to files named:
+                         <prefix>.<procedure name>.dot
+
+  /useBaseNameForFileName : When parsing use basename of file for tokens instead
+                            of the path supplied on the command line
+
+  ---- Inference options -----------------------------------------------------
+
+  /infer:<flags>
+                use abstract interpretation to infer invariants
+                The default is /infer:i
+                   <flags> are as follows (missing <flags> means all)
+                   i = intervals
+                   c = constant propagation
+                   d = dynamic type
+                   n = nullness
+                   p = polyhedra for linear inequalities
+                   t = trivial bottom/top lattice (cannot be combined with
+                       other domains)
+                   j = stronger intervals (cannot be combined with other
+                       domains)
+                or the following (which denote options, not domains):
+                   s = debug statistics
+                0..9 = number of iterations before applying a widen (default=0)
+  /noinfer      turn off the default inference, and overrides the /infer
+                switch on its left
+  /checkInfer   instrument inferred invariants as asserts to be checked by
+                theorem prover
+  /interprocInfer
+                perform interprocedural inference (deprecated, not supported)
+  /contractInfer
+                perform procedure contract inference
+  /instrumentInfer
+                h - instrument inferred invariants only at beginning of
+                    loop headers (default)
+                e - instrument inferred invariants at beginning and end
+                    of every block (this mode is intended for use in
+                    debugging of abstract domains)
+  /printInstrumented
+                print Boogie program after it has been instrumented with
+                invariants
+
+  ---- Debugging and general tracing options ---------------------------------
+
+  /trace        blurt out various debug trace information
+  /traceTimes   output timing information at certain points in the pipeline
+  /tracePOs     output information about the number of proof obligations
+                (also included in the /trace output)
+  /log[:method] Print debug output during translation
+
+  /break        launch and break into debugger
+
+  ---- Verification-condition generation options -----------------------------
+
+  /liveVariableAnalysis:<c>
+                0 = do not perform live variable analysis
+                1 = perform live variable analysis (default)
+                2 = perform interprocedural live variable analysis
+  /noVerify     skip VC generation and invocation of the theorem prover
+  /verifySnapshots:<n>
+                verify several program snapshots (named <filename>.v0.bpl
+                to <filename>.vN.bpl) using verification result caching:
+                0 - do not use any verification result caching (default)
+                1 - use the basic verification result caching
+                2 - use the more advanced verification result caching
+  /verifySeparately
+                verify each input program separately
+  /removeEmptyBlocks:<c>
+                0 - do not remove empty blocks during VC generation
+                1 - remove empty blocks (default)
+  /coalesceBlocks:<c>
+                0 = do not coalesce blocks
+                1 = coalesce blocks (default)
+  /vc:<variety> n = nested block (default for /prover:Simplify),
+                m = nested block reach,
+                b = flat block, r = flat block reach,
+                s = structured, l = local,
+                d = dag (default, except with /prover:Simplify)
+                doomed = doomed
+  /traceverify  print debug output during verification condition generation
+  /subsumption:<c>
+                apply subsumption to asserted conditions:
+                0 - never, 1 - not for quantifiers, 2 (default) - always
+  /alwaysAssumeFreeLoopInvariants
+                usually, a free loop invariant (or assume
+                statement in that position) is ignored in checking contexts
+                (like other free things); this option includes these free
+                loop invariants as assumes in both contexts
+  /inline:<i>   use inlining strategy <i> for procedures with the :inline
+                attribute, see /attrHelp for details:
+                  none
+                  assume (default)
+                  assert
+                  spec
+  /printInlined
+                print the implementation after inlining calls to
+                procedures with the :inline attribute (works with /inline)
+  /lazyInline:1
+                Use the lazy inlining algorithm
+  /stratifiedInline:1
+                Use the stratified inlining algorithm
+  /fixedPointEngine:<engine>
+                Use the specified fixed point engine for inference
+  /recursionBound:<n>
+                Set the recursion bound for stratified inlining to
+                be n (default 500)
+  /inferLeastForUnsat:<str>
+                Infer the least number of constants (whose names
+                are prefixed by <str>) that need to be set to
+                true for the program to be correct. This turns
+                on stratified inlining.
+  /smoke        Soundness Smoke Test: try to stick assert false; in some
+                places in the BPL and see if we can still prove it
+  /smokeTimeout:<n>
+                Timeout, in seconds, for a single theorem prover
+                invocation during smoke test, defaults to 10.
+  /causalImplies
+                Translate Boogie's A ==> B into prover's A ==> A && B.
+  /typeEncoding:<m>
+                how to encode types when sending VC to theorem prover
+                   n = none (unsound)
+                   p = predicates (default)
+                   a = arguments
+                   m = monomorphic
+  /monomorphize   
+                Do not abstract map types in the encoding (this is an
+                experimental feature that will not do the right thing if
+                the program uses polymorphism)
+  /reflectAdd   In the VC, generate an auxiliary symbol, elsewhere defined
+                to be +, instead of +.
+
+  ---- Verification-condition splitting --------------------------------------
+
+  /vcsMaxCost:<f>
+                VC will not be split unless the cost of a VC exceeds this
+                number, defaults to 2000.0. This does NOT apply in the
+                keep-going mode after first round of splitting.
+  /vcsMaxSplits:<n>
+                Maximal number of VC generated per method. In keep
+                going mode only applies to the first round.
+                Defaults to 1.
+  /vcsMaxKeepGoingSplits:<n>
+                If set to more than 1, activates the keep
+                going mode, where after the first round of splitting,
+                VCs that timed out are split into <n> pieces and retried
+                until we succeed proving them, or there is only one
+                assertion on a single path and it timeouts (in which
+                case error is reported for that assertion).
+                Defaults to 1.
+  /vcsKeepGoingTimeout:<n>
+                Timeout in seconds for a single theorem prover
+                invocation in keep going mode, except for the final
+                single-assertion case. Defaults to 1s.
+  /vcsFinalAssertTimeout:<n>
+                Timeout in seconds for the single last
+                assertion in the keep going mode. Defaults to 30s.
+  /vcsPathJoinMult:<f>
+                If more than one path join at a block, by how much
+                multiply the number of paths in that block, to accomodate
+                for the fact that the prover will learn something on one
+                paths, before proceeding to another. Defaults to 0.8.
+  /vcsPathCostMult:<f1>
+  /vcsAssumeMult:<f2>
+                The cost of a block is
+                    (<assert-cost> + <f2>*<assume-cost>) * 
+                    (1.0 + <f1>*<entering-paths>)
+                <f1> defaults to 1.0, <f2> defaults to 0.01.
+                The cost of a single assertion or assumption is
+                currently always 1.0.
+  /vcsPathSplitMult:<f>
+                If the best path split of a VC of cost A is into
+                VCs of cost B and C, then the split is applied if
+                A >= <f>*(B+C), otherwise assertion splitting will be
+                applied. Defaults to 0.5 (always do path splitting if
+                possible); set it higher to do less path splitting
+                and more assertion splitting.
+  /vcsDumpSplits
+                For split #n dump split.n.dot and split.n.bpl.
+                Warning: Affects error reporting.
+  /vcsCores:<n>
+                Try to verify <n> VCs at once. Defaults to 1.
+  /vcsLoad:<f>  Sets vcsCores to the machine's ProcessorCount * f,
+                rounded to the nearest integer (where 0.0 <= f <= 3.0),
+                but never to less than 1.
+
+  ---- Prover options --------------------------------------------------------
+
+  /errorLimit:<num>
+                Limit the number of errors produced for each procedure
+                (default is 5, some provers may support only 1)
+  /timeLimit:<num>
+                Limit the number of seconds spent trying to verify
+                each procedure
+  /errorTrace:<n>
+                0 - no Trace labels in the error output,
+                1 (default) - include useful Trace labels in error output,
+                2 - include all Trace labels in the error output
+  /vcBrackets:<b>
+                bracket odd-charactered identifier names with |'s.  <b> is:
+                   0 - no (default with non-/prover:Simplify),
+                   1 - yes (default with /prover:Simplify)
+  /prover:<tp>  use theorem prover <tp>, where <tp> is either the name of
+                a DLL containing the prover interface located in the
+                Boogie directory, or a full path to a DLL containing such
+                an interface. The standard interfaces shipped include:
+                    SMTLib (default, uses the SMTLib2 format and calls Z3)
+                    Z3 (uses Z3 with the Simplify format)
+                    Simplify
+                    ContractInference (uses Z3)
+                    Z3api (Z3 using Managed .NET API)
+  /proverOpt:KEY[=VALUE]
+                Provide a prover-specific option (short form /p).
+  /proverLog:<file>
+                Log input for the theorem prover.  Like filenames
+                supplied as arguments to other options, <file> can use the
+                following macros:
+                    @TIME@    expands to the current time
+                    @PREFIX@  expands to the concatenation of strings given
+                              by /logPrefix options
+                    @FILE@    expands to the last filename specified on the
+                              command line
+                In addition, /proverLog can also use the macro '@PROC@',
+                which causes there to be one prover log file per
+                verification condition, and the macro then expands to the
+                name of the procedure that the verification condition is for.
+  /logPrefix:<str>
+                Defines the expansion of the macro '@PREFIX@', which can
+                be used in various filenames specified by other options.
+  /proverLogAppend
+                Append (not overwrite) the specified prover log file
+  /proverWarnings
+                0 (default) - don't print, 1 - print to stdout,
+                2 - print to stderr
+  /proverMemoryLimit:<num>
+                Limit on the virtual memory for prover before
+                restart in MB (default:100MB)
+  /restartProver
+                Restart the prover after each query
+  /proverShutdownLimit:<num>
+                Time between closing the stream to the prover and
+                killing the prover process (default: 0s)
+  /platform:<ptype>,<location>
+                ptype = v11,v2,cli1
+                location = platform libraries directory
+
+  Simplify specific options:
+  /simplifyMatchDepth:<num>
+                Set Simplify prover's matching depth limit
+
+  Z3 specific options:
+  /z3opt:<arg>  specify additional Z3 options
+  /z3multipleErrors
+                report multiple counterexamples for each error
+  /useArrayTheory
+                use Z3's native theory (as opposed to axioms).  Currently
+                implies /monomorphize.
+  /useSmtOutputFormat
+                Z3 outputs a model in the SMTLIB2 format.
+  /z3types      generate multi-sorted VC that make use of Z3 types
+  /z3lets:<n>   0 - no LETs, 1 - only LET TERM, 2 - only LET FORMULA,
+                3 - (default) any
+  /z3exe:<path>
+                path to Z3 executable
+
+  CVC4 specific options:
+  /cvc4exe:<path>
+                path to CVC4 executable
+

26. References

+
+

References

+
[0]  Mike Barnett, Bor-Yuh Evan Chang, Robert DeLine, Bart Jacobs, and K. Rustan M. + Leino. + Boogie: A modular reusable verifier for object-oriented programs. + In Frank S. de Boer, Marcello M. Bonsangue, Susanne Graf, and + Willem-Paul de Roever, editors, Formal Methods for Components and + Objects: 4th International Symposium, FMCO 2005, volume 4111, pages + 364–387. Springer, September 2006. 🔎
+
[1]  Yves Bertot and Pierre Castéran. + Interactive Theorem Proving and Program Development — + Coq'Art: The Calculus of Inductive Constructions. + Texts in Theoretical Computer Science. Springer, 2004. 🔎
+
[2]  Ana Bove, Peter Dybjer, and Ulf Norell. + A brief overview of Agda — a functional language with dependent + types. + In Stefan Berghofer, Tobias Nipkow, Christian Urban, and Makarius + Wenzel, editors, Theorem Proving in Higher Order Logics, 22nd + International Conference, TPHOLs 2009, volume 5674 of Lecture Notes in + Computer Science, pages 73–78. Springer, August 2009. 🔎
+
[3]  Juanito Camilleri and Tom Melham. + Reasoning with inductively defined relations in the HOL theorem + prover. + Technical Report 265, University of Cambridge Computer Laboratory, + 1992. 🔎
+
[4]  Leonardo de Moura and Nikolaj Bjørner. + Z3: An efficient SMT solver. + In C. R. Ramakrishnan and Jakob Rehof, editors, Tools and + Algorithms for the Construction and Analysis of Systems, 14th International + Conference, TACAS 2008, volume 4963, pages 337–340. Springer, March–April + 2008. 🔎
+
[5]  K. Rustan M. Leino et al. + Dafny source code. + Available at http://dafny.codeplex.com🔎
+
[6]  John Harrison. + Inductive definitions: Automation and application. + In E. Thomas Schubert, Phillip J. Windley, and Jim Alves-Foss, + editors, TPHOLs 1995, volume 971 of LNCS, pages 200–213. + Springer, 1995. 🔎
+
[7]  C. A. R. Hoare. + An axiomatic basis for computer programming. + Communications of the ACM, 12 (10): + 576–580,583, October 1969. 🔎
+
[8]  Bart Jacobs and Jan Rutten. + An introduction to (co)algebra and (co)induction. + In Davide Sangiorgi and Jan Rutten, editors, Advanced Topics in + Bisimulation and Coinduction, number 52 in Cambridge Tracts in Theoretical + Computer Science, pages 38–99. Cambridge University Press, October 2011. 🔎
+
[9]  Ioannis T. Kassios. + Dynamic frames: Support for framing, dependencies and sharing without + restrictions. + In Jayadev Misra, Tobias Nipkow, and Emil Sekerinski, editors, + FM 2006: Formal Methods, 14th International Symposium on Formal + Methods, volume 4085, pages 268–283. Springer, August 2006. 🔎
+
[10]  Matt Kaufmann, Panagiotis Manolios, and J Strother Moore. + Computer-Aided Reasoning: An Approach. + Kluwer Academic Publishers, 2000. 🔎
+
[11]  Dexter Kozen and Alexandra Silva. + Practical coinduction. + Technical Report http://hdl.handle.net/1813/30510, Comp. and + Inf. Science, Cornell Univ., 2012. 🔎
+
[12]  Alexander Krauss. + Automating Recursive Definitions and Termination Proofs in + Higher-Order Logic. + PhD thesis, Technische Universität München, 2009. 🔎
+
[13]  K. Rustan M. Leino. + Main microsoft research dafny web page, a. + Available at + http://research.microsoft.com/en-us/projects/dafny🔎
+
[14]  K. Rustan M. Leino. + Dafny quick reference, b. + Available at + http://research.microsoft.com/en-us/projects/dafny/reference.aspx🔎
+
[15]  K. Rustan M. Leino. + Try dafny in your browser, c. + Available at http://rise4fun.com/Dafny🔎
+
[16]  K. Rustan M. Leino. + This is Boogie 2. + Manuscript KRML 178, 2008. + Available at http://research.microsoft.com/~leino/papers.html🔎
+
[17]  K. Rustan M. Leino. + Dynamic-frame specifications in dafny. + JML seminar, Dagstuhl, Germany, 2009. + Available at + http://research.microsoft.com/en-us/um/people/leino/papers/dafny-jml-dagstuhl-2009.pptx🔎
+
[18]  K. Rustan M. Leino. + Dafny: An automatic program verifier for functional correctness. + In Edmund M. Clarke and Andrei Voronkov, editors, LPAR-16, + volume 6355, pages 348–370. Springer, April 2010. 🔎
+
[19]  K. Rustan M. Leino. + Automating induction with an SMT solver. + In Viktor Kuncak and Andrey Rybalchenko, editors, Verification, + Model Checking, and Abstract Interpretation — 13th International + Conference, VMCAI 2012, volume 7148, pages 315–331. Springer, January 2012. 🔎
+
[20]  K. Rustan M. Leino and Michal Moskal. + Co-induction simply: Automatic co-inductive proofs in a program + verifier. + Manuscript KRML 230, 2014a. + Available at + http://research.microsoft.com/en-us/um/people/leino/papers/krml230.pdf🔎
+
[21]  K. Rustan M. Leino and Michał Moskal. + Co-induction simply — automatic co-inductive proofs in a program + verifier. + In FM 2014, volume 8442 of LNCS, pages 382–398. + Springer, May 2014b. 🔎
+
[22]  K. Rustan M. Leino and Nadia Polikarpova. + Verified calculations. + Manuscript KRML 231, 2013. + Available at + http://research.microsoft.com/en-us/um/people/leino/papers/krml231.pdf🔎
+
[23]  K. Rustan M. Leino and Philipp Rümmer. + A polymorphic intermediate verification language: Design and logical + encoding. + In Javier Esparza and Rupak Majumdar, editors, Tools and + Algorithms for the Construction and Analysis of Systems, 16th International + Conference, TACAS 2010, volume 6015, pages 312–327. Springer, March 2010. 🔎
+
[24]  Xavier Leroy and Hervé Grall. + Coinductive big-step operational semantics. + Information and Computation, 207 (2): + 284–304, February 2009. 🔎
+
[25]  Panagiotis Manolios and J Strother Moore. + Partial functions in ACL2. + Journal of Automated Reasoning, 31 (2): + 107–127, 2003. 🔎
+
[26]  Robin Milner. + A Calculus of Communicating Systems. + Springer-Verlag New York, Inc., 1982. + ISBN 0387102353. 🔎
+
[27]  Hanspeter Mössenböck, Markus Löberbauer, and Albrecht + Wöß. + The compiler generator coco/r. + Open source from University of Linz, 2013. + Available at + http://www.ssw.uni-linz.ac.at/Research/Projects/Coco/🔎
+
[28]  Tobias Nipkow and Gerwin Klein. + Concrete Semantics with Isabelle/HOL. + Springer, 2014. 🔎
+
[29]  Christine Paulin-Mohring. + Inductive definitions in the system Coq — rules and properties. + In TLCA '93, volume 664 of LNCS, pages 328–345. + Springer, 1993. 🔎
+
[30]  Lawrence C. Paulson. + A fixedpoint approach to implementing (co)inductive definitions. + In Alan Bundy, editor, CADE-12, volume 814 of LNCS, + pages 148–161. Springer, 1994. 🔎
+
[31]  Benjamin C. Pierce, Chris Casinghino, Marco Gaboardi, Michael Greenberg, + Catalin Hriţcu, Vilhelm Sjöberg, and Brent Yorgey. + Software Foundations. + http://www.cis.upenn.edu/~bcpierce/sf, version 3.2 edition, + January 2015. 🔎
+
[32]  Jan Smans, Bart Jacobs, Frank Piessens, and Wolfram Schulte. + Automatic verifier for Java-like programs based on dynamic frames. + In José Luiz Fiadeiro and Paola Inverardi, editors, + Fundamental Approaches to Software Engineering, 11th International + Conference, FASE 2008, volume 4961, pages 261–275. Springer, March–April + 2008. 🔎
+
[33]  Jan Smans, Bart Jacobs, and Frank Piessens. + Implicit dynamic frames: Combining dynamic frames and separation + logic. + In Sophia Drossopoulou, editor, ECOOP 2009 — Object-Oriented + Programming, 23rd European Conference, volume 5653, pages 148–172. + Springer, July 2009. 🔎
+
[34]  Nikhil Swamy, Juan Chen, Cédric Fournet, Pierre-Yves Strub, Karthikeyan + Bhargavan, and Jean Yang. + Secure distributed programming with value-dependent types. + In ICFP 2011, pages 266–278. ACM, September 2011. 🔎
+
[35]  Alfred Tarski. + A lattice-theoretical fixpoint theorem and its applications. + Pacific Journal of Mathematics, 5: 285–309, 1955. 🔎
+
[36]  Glynn Winskel. + The Formal Semantics of Programming Languages: An + Introduction. + MIT Press, 1993. 🔎
+
+
+ +
+

0.This will change in a future version of Dafny that +will support both nullable and (by default) non-null reference +types. +

+
+

1.Being equality-supporting is just one of many +modes that one can imagine types in a rich type system to have. +For example, other modes could include having a total order, +being zero-initializable, and possibly being uninhabited. If +Dafny were to support more modes in the future, the “( )”-suffix +syntax may be extended. For now, the suffix can only indicate the +equality-supporting mode. +

+
+

2.Now that Dafny supports built-in tuples, the +plan is to change the sequence slice operation to return not a +sequence of subsequences, but a tuple of subsequences. +

+
+

3.This is likely to change in the future to disallow +multiple occurrences of the same key. +

+
+

4.This is likely to change in the future as +follows: The in and !in operations will no longer be +supported on maps. Instead, for any map m, m.Domain will +return its domain as a set and m.Range will return, also as a +set, the image of m under its domain. +

+
+

5.Dafny is open source at dafny.codeplex.com and can also be used online at rise4fun.com/dafny. +

+
+

6.The current compiler restriction that object cannot +be used as a type parameter needs to be removed. +

+
+

7.It would make sense to rename the special +fields _reads and _modifies to have the same names as the +corresponding keywords, reads and modifies, as is done for +function values. Also, the various _decreasesi fields can +be combined into one field named decreases whose type is an +n-tuple. These changes may be incorporated into a future version +of Dafny. +

+

+
+

8.Higher-order function support in Dafny is +rather modest and typical reasoning patterns do not involve them, so this +restriction is not as limiting as it would have been in, e.g., Coq. +

+
+

9.Note, two places where co-predicates +and co-lemmas are not analogous are: co-predicates must not make +recursive calls to their prefix predicates, and co-predicates cannot +mention _k. +

+
+

10.Should newtype perhaps be renamed to numtype? +

+
+

11.Would it be useful to also +automatically define predicate N?(m: M) { Q }? +

+
+

12.The restriction is due to a current limitation in +the compiler. This will change in the future and will also open +up the possibility for subset types and non-null reference +types. +

+
+

13.A future version of Dafny will support +user-defined subset types. +

+ + + + + Binary files /tmp/tmpmYgONk/eNTA8SFwit/dafny-1.9.5/Docs/DafnyRef/out/DafnyRef.pdf and /tmp/tmpmYgONk/LnaTbJX_hb/dafny-1.9.7/Docs/DafnyRef/out/DafnyRef.pdf differ diff -Nru dafny-1.9.5/Docs/DafnyRef/paper-full.bib dafny-1.9.7/Docs/DafnyRef/paper-full.bib --- dafny-1.9.5/Docs/DafnyRef/paper-full.bib 1970-01-01 00:00:00.000000000 +0000 +++ dafny-1.9.7/Docs/DafnyRef/paper-full.bib 2016-06-05 21:11:14.000000000 +0000 @@ -0,0 +1,577 @@ +@string{lncs = "Lecture Notes in Computer Science"} +@string{lnai = "Lecture Notes in Artificial Intelligence"} + +@InProceedings{Dafny:LPAR16, + author = {K. Rustan M. Leino}, + title = {Dafny: An Automatic Program Verifier for Functional Correctness}, + booktitle = {LPAR-16}, + year = {2010}, + volume = {6355}, + series = lncs, + publisher = {Springer}, + month = apr, + editor = {Edmund M. Clarke and Andrei Voronkov}, + pages = {348-370}, +} + +@InCollection{LeinoMoskal:UsableProgramVerification, + author = {K. Rustan M. Leino and Micha{\l} Moskal}, + title = {Usable Auto-Active Verification}, + booktitle = {UV10 (Usable Verification) workshop}, + year = {2010}, + editor = {Tom Ball and Lenore Zuck and N. Shankar}, + month = nov, + publisher = {\url{http://fm.csl.sri.com/UV10/}}, +} + +@InProceedings{Leino:VMCAI2012, + author = {K. Rustan M. Leino}, + title = {Automating Induction with an {SMT} Solver}, + booktitle = {Verification, Model Checking, and Abstract Interpretation --- 13th International Conference, VMCAI 2012}, + pages = {315-331}, + year = {2012}, + editor = {Viktor Kuncak and Andrey Rybalchenko}, + volume = {7148}, + series = lncs, + month = jan, + publisher = {Springer}, +} + +@InProceedings{LeinoMonahan:Comprehensions, + author = {K. Rustan M. Leino and Rosemary Monahan}, + title = {Reasoning about Comprehensions with First-Order {SMT} Solvers}, + booktitle = {Proceedings of the 2009 ACM Symposium on Applied Computing (SAC)}, + editor = {Sung Y. 
Shin and Sascha Ossowski}, + publisher = {ACM}, + month = mar, + year = 2009, + pages = {615-622}, +} + +@TechReport{VeriFast:TR, + author = {Bart Jacobs and Frank Piessens}, + title = {The {VeriFast} program verifier}, + institution = {Department of Computer Science, Katholieke Universiteit Leuven}, + year = {2008}, + number = {CW-520}, + month = aug, +} + +@InProceedings{LGLM:BVD, + author = {Le Goues, Claire and K. Rustan M. Leino and Micha{\l} Moskal}, + title = {The {B}oogie {V}erification {D}ebugger (Tool Paper)}, + booktitle = {Software Engineering and Formal Methods --- 9th International Conference, SEFM 2011}, + pages = {407-414}, + year = {2011}, + editor = {Gilles Barthe and Alberto Pardo and Gerardo Schneider}, + volume = {7041}, + series = lncs, + month = nov, + publisher = {Springer}, +} + +@inproceedings{Paulson:coinduction2000, + author = {Lawrence C. Paulson}, + title = {A fixedpoint approach to (co)inductive and (co)datatype + definitions}, + booktitle = {Proof, Language, and Interaction}, + year = {2000}, + pages = {187-212}, + crossref = {DBLP:conf/birthday/1999milner}, + bibsource = {DBLP, http://dblp.uni-trier.de} +} +@proceedings{DBLP:conf/birthday/1999milner, + editor = {Gordon D. Plotkin and + Colin Stirling and + Mads Tofte}, + title = {Proof, Language, and Interaction, Essays in Honour of Robin + Milner}, + booktitle = {Proof, Language, and Interaction}, + publisher = {The MIT Press}, + year = {2000}, + isbn = {978-0-262-16188-6}, + bibsource = {DBLP, http://dblp.uni-trier.de} +} + + +@inproceedings{Paulson:coinduction1994, + author = {Lawrence C. 
Paulson}, + title = {A Fixedpoint Approach to Implementing (Co)Inductive Definitions}, + booktitle = {CADE}, + year = {1994}, + pages = {148-161}, + ee = {http://dx.doi.org/10.1007/3-540-58156-1_11}, + crossref = {DBLP:conf/cade/1994}, + bibsource = {DBLP, http://dblp.uni-trier.de} +} +@proceedings{DBLP:conf/cade/1994, + editor = {Alan Bundy}, + title = {Automated Deduction - CADE-12, 12th International Conference + on Automated Deduction, Nancy, France, June 26 - July 1, + 1994, Proceedings}, + booktitle = {CADE}, + publisher = {Springer}, + series = lncs, + volume = {814}, + year = {1994}, + isbn = {3-540-58156-1}, + bibsource = {DBLP, http://dblp.uni-trier.de} +} + + +@inproceedings{Hausmann:CoCasl, + author = {Daniel Hausmann and + Till Mossakowski and + Lutz Schr{\"o}der}, + title = {Iterative Circular Coinduction for {CoCasl} in {I}sabelle/{HOL}}, + booktitle = {Fundamental Approaches to Software Engineering, 8th International + Conference, FASE 2005}, + editor = {Maura Cerioli}, + series = lncs, + volume = {3442}, + publisher = {Springer}, + year = {2005}, + pages = {341-356}, +} + +@inproceedings{Rosu:CIRC, + author = {Dorel Lucanu and + Grigore Rosu}, + title = {{CIRC}: A Circular Coinductive Prover}, + booktitle = {CALCO}, + year = {2007}, + pages = {372-378}, + ee = {http://dx.doi.org/10.1007/978-3-540-73859-6_25}, + crossref = {DBLP:conf/calco/2007}, + bibsource = {DBLP, http://dblp.uni-trier.de} +} +@proceedings{DBLP:conf/calco/2007, + editor = {Till Mossakowski and + Ugo Montanari and + Magne Haveraaen}, + title = {Algebra and Coalgebra in Computer Science, Second International + Conference, CALCO 2007, Bergen, Norway, August 20-24, 2007, + Proceedings}, + booktitle = {CALCO}, + publisher = {Springer}, + series = lncs, + volume = {4624}, + year = {2007}, + isbn = {978-3-540-73857-2}, + bibsource = {DBLP, http://dblp.uni-trier.de} +} + + +@inproceedings{Rosu:circRule, + author = {Grigore Rosu and + Dorel Lucanu}, + title = {Circular Coinduction: A 
Proof Theoretical Foundation}, + booktitle = {CALCO}, + year = {2009}, + pages = {127-144}, + ee = {http://dx.doi.org/10.1007/978-3-642-03741-2_10}, + crossref = {DBLP:conf/calco/2009}, + bibsource = {DBLP, http://dblp.uni-trier.de} +} +@proceedings{DBLP:conf/calco/2009, + editor = {Alexander Kurz and + Marina Lenisa and + Andrzej Tarlecki}, + title = {Algebra and Coalgebra in Computer Science, Third International + Conference, CALCO 2009, Udine, Italy, September 7-10, 2009. + Proceedings}, + booktitle = {CALCO}, + publisher = {Springer}, + series = lncs, + volume = {5728}, + year = {2009}, + isbn = {978-3-642-03740-5}, + ee = {http://dx.doi.org/10.1007/978-3-642-03741-2}, + bibsource = {DBLP, http://dblp.uni-trier.de} +} + +@inproceedings{VCC, + author = {Ernie Cohen and + Markus Dahlweid and + Mark A. Hillebrand and + Dirk Leinenbach and + Micha{\l} Moskal and + Thomas Santen and + Wolfram Schulte and + Stephan Tobies}, + title = {{VCC}: A Practical System for Verifying Concurrent {C}}, + booktitle = {TPHOLs 2009}, + series = LNCS, + publisher = {Springer}, + volume = {5674}, + year = {2009}, + pages = {23-42}, +} + +@InProceedings{VeriFast:ProgramsAsProofs, + author = {Bart Jacobs and Jan Smans and Frank Piessens}, + title = {{VeriFast}: Imperative Programs as Proofs}, + booktitle = {VS-Tools workshop at VSTTE 2010}, + year = {2010}, + month = aug, +} + +@book{DijkstraScholten:book, + author = "Edsger W. Dijkstra and Carel S. 
Scholten", + title = "Predicate Calculus and Program Semantics", + publisher = "Springer-Verlag", + series = "Texts and Monographs in Computer Science", + year = 1990 +} + +@Book{BirdWadler:IntroFunctionalProgramming, + author = {Richard Bird and Philip Wadler}, + title = {Introduction to Functional Programming}, + publisher = {Prentice Hall}, + series = {International Series in Computing Science}, + year = {1992}, +} + +@inproceedings{Z3, + author = "de Moura, Leonardo and Nikolaj Bj{\o}rner", + title = {{Z3}: An efficient {SMT} solver}, + booktitle = {Tools and Algorithms for the Construction and + Analysis of Systems, 14th International Conference, + TACAS 2008}, + editor = {C. R. Ramakrishnan and Jakob Rehof}, + series = lncs, + volume = 4963, + publisher = {Springer}, + year = 2008, + pages = {337-340}, +} + +@phdthesis{moy09phd, + author = {Yannick Moy}, + title = {Automatic Modular Static Safety Checking for C Programs}, + school = {Universit{\'e} Paris-Sud}, + year = 2009, + month = jan, + topics = {team}, + x-equipes = {demons PROVAL}, + x-type = {these}, + x-support = {rapport}, + url = {http://www.lri.fr/~marche/moy09phd.pdf} +} +@Book{PeytonJones:Haskell, + author = {Peyton Jones, Simon}, + title = {Haskell 98 language and libraries: the Revised Report}, + publisher = {Cambridge University Press}, + year = {2003}, +} + +@InProceedings{Park:InfSeq, + author = {David Park}, + title = {Concurrency and automata on infinite sequences}, + booktitle = {Theoretical Computer Science, 5th GI-Conference}, + editor = {Peter Deussen}, + volume = {104}, + series = lncs, + publisher = {Springer}, + year = {1981}, + pages = {167-183}, +} + +@Article{Leroy:CompCert:CACM, + author = {Xavier Leroy}, + title = {Formal verification of a realistic compiler}, + journal = cacm, + volume = {52}, + number = {7}, + month = jul, + year = {2009}, + pages = {107-115}, +} + +@Book{KeY:book, + author = {Bernhard Beckert and Reiner H{\"a}hnle and Peter H. 
Schmitt}, + title = {Verification of Object-Oriented Software: The {KeY} Approach}, + volume = 4334, + series = lnai, + publisher = {Springer}, + year = 2007, +} + +@Book{Nipkow-Paulson-Menzel02, + author = {Tobias Nipkow and Lawrence Paulson and Markus Menzel}, + title = {{Isabelle/HOL} --- A Proof Assistant for Higher-Order Logic}, + publisher = {Springer}, + year = 2002, + volume = 2283, + series = LNCS, +} + +@Book{Coq:book, + author = {Yves Bertot and Pierre Cast{\'e}ran}, + title = {Interactive Theorem Proving and Program Development --- {C}oq'{A}rt: The Calculus of Inductive Constructions}, + publisher = {Springer}, + year = {2004}, + series = {Texts in Theoretical Computer Science}, +} + +@Book{ACL2:book, + author = {Matt Kaufmann and Panagiotis Manolios and J Strother Moore}, + title = {Computer-Aided Reasoning: An Approach}, + publisher = {Kluwer Academic Publishers}, + year = {2000}, +} + +@Book{BoyerMoore:book, + author = {Robert S. Boyer and J Strother Moore}, + title = {A Computational Logic}, + publisher = {Academic Press}, + series = {ACM Monograph Series}, + year = {1979}, +} + +@article{Bertot:CoinductionInCoq, + location = {http://www.scientificcommons.org/8157029}, + title = {CoInduction in {C}oq}, + author = {Bertot, Yves}, + year = {2005}, + publisher = {HAL - CCSd - CNRS}, + url = {http://hal.inria.fr/inria-00001174/en/}, + institution = {CCSd/HAL : e-articles server (based on gBUS) [http://hal.ccsd.cnrs.fr/oai/oai.php] (France)}, +} + +@InProceedings{Coq:Coinduction, + author = {Eduardo Gim{\'e}nez}, + title = {An Application of Co-inductive Types in {Coq}: Verification of the Alternating Bit Protocol}, + booktitle = {Types for Proofs and Programs, International Workshop TYPES'95}, + pages = {135-152}, + year = {1996}, + editor = {Stefano Berardi and Mario Coppo}, + volume = 1158, + series = lncs, + publisher = {Springer}, +} + +@InProceedings{Boogie:Architecture, + author = "Mike Barnett and Bor-Yuh Evan Chang and Robert DeLine and + Bart 
Jacobs and K. Rustan M. Leino", + title = "{B}oogie: A Modular Reusable Verifier for Object-Oriented Programs", + booktitle = "Formal Methods for Components and Objects: 4th + International Symposium, FMCO 2005", + editor = "de Boer, Frank S. and Marcello M. Bonsangue and + Susanne Graf and de Roever, Willem-Paul", + series = lncs, + volume = 4111, + publisher = "Springer", + month = sep, + year = 2006, + pages = "364-387" +} + +@InCollection{JacobsRutten:IntroductionCoalgebra, + author = {Bart Jacobs and Jan Rutten}, + title = {An Introduction to (Co)Algebra and (Co)Induction}, + booktitle = {Advanced Topics in Bisimulation and Coinduction}, + editor = {Davide Sangiorgi and Jan Rutten}, + series = {Cambridge Tracts in Theoretical Computer Science}, + number = {52}, + publisher = {Cambridge University Press}, + month = oct, + year = {2011}, + pages = {38-99}, +} + +@Book{Chlipala, + author = {Adam Chlipala}, + title = {Certified Programming with Dependent Types}, + publisher = {MIT Press}, + year = {To appear}, + note = {http://adam.chlipala.net/cpdt/} +} + +@Misc{Charity, + author = {Robin Cockett}, + title = {The {CHARITY} home page}, + howpublished = {\url{http://pll.cpsc.ucalgary.ca/charity1/www/home.html}}, + year = {1996}, +} + +@Article{Tarski:theorem, + author = "Alfred Tarski", + title = "A lattice-theoretical fixpoint theorem and its applications", + journal = "Pacific Journal of Mathematics", + year = 1955, + volume = 5, + pages = "285-309" +} + +@InProceedings{PVS, + author = "Sam Owre and S. Rajan and John M. Rushby and Natarajan + Shankar and Mandayam K. Srivas", + title = "{PVS}: Combining Specification, Proof Checking, and Model + Checking", + editor = "Rajeev Alur and Thomas A. 
Henzinger", + booktitle = "Computer Aided Verification, 8th International + Conference, CAV '96", + volume = 1102, + series = lncs, + publisher = "Springer", + year = 1996, + pages = "411-414" +} + +@InProceedings{SonnexEtAl:Zeno, + author = {William Sonnex and Sophia Drossopoulou and Susan Eisenbach}, + title = {Zeno: An Automated Prover for Properties of Recursive + Data Structures}, + booktitle = {Tools and Algorithms for the Construction and Analysis of + Systems --- 18th International Conference, TACAS 2012}, + editor = {Cormac Flanagan and Barbara K{\"o}nig}, + volume = {7214}, + series = lncs, + year = {2012}, + month = mar # "--" # apr, + publisher = {Springer}, + pages = {407-421}, +} + +@InProceedings{JohanssonEtAl:IPT2010, + author = {Moa Johansson and Lucas Dixon and Alan Bundy}, + title = {Case-Analysis for {R}ippling and Inductive Proof}, + booktitle = {Interactive Theorem Proving, First International Conference, ITP 2010}, + editor = {Matt Kaufmann and Lawrence C. Paulson}, + volume = {6172}, + series = lncs, + publisher = {Springer}, + month = jul, + year = {2010}, + pages = {291-306}, +} + +@book{Milner:CCS, + author = "Robin Milner", + title = {A Calculus of Communicating Systems}, + year = {1982}, + isbn = {0387102353}, + publisher = {Springer-Verlag New York, Inc.}, +} + +@InProceedings{BoehmeNipkow:Sledgehammer, + author = {Sascha B{\"o}hme and Tobias Nipkow}, + title = {Sledgehammer: {J}udgement {D}ay}, + booktitle = {Automated Reasoning, 5th International Joint Conference, IJCAR 2010}, + editor = {J{\"u}rgen Giesl and Reiner H{\"a}hnle}, + year = {2010}, + pages = {107-121}, + volume = {6173}, + series = lncs, + month = jul, + publisher = {Springer}, +} + +@PhdThesis{Norell:PhD, + author = {Ulf Norell}, + title = {Towards a practical programming language based on dependent type theory}, + school = {Department of Computer Science and Engineering, Chalmers + University of Technology}, + year = {2007}, + month = sep, +} + 
+@Article{Paulson:MechanizingCoRecursion, + author = {Lawrence C. Paulson}, + title = {Mechanizing Coinduction and Corecursion in Higher-order Logic}, + journal = {Journal of Logic and Computation}, + year = {1997}, + volume = {7}, +} + +@InProceedings{Bertot:sieve, + author = {Yves Bertot}, + title = {Filters on CoInductive Streams, an Application to {E}ratosthenes' Sieve}, + booktitle = {Typed Lambda Calculi and Applications, 7th International Conference, + TLCA 2005}, + editor = {Pawel Urzyczyn}, + series = lncs, + volume = {3461}, + month = apr, + year = {2005}, + pages = {102-115}, + publisher = {Springer}, +} + +@Misc{AltenkirchDanielsson:QuantifierInversion, + author = {Thorsten Altenkirch and Nils Anders Danielsson}, + title = {Termination Checking in the Presence of Nested Inductive and Coinductive Types}, + howpublished = {Short note supporting a talk given at PAR 2010, Workshop on Partiality and Recursion in Interactive Theorem Provers}, + year = {2010}, + note = {Available from \url{http://www.cse.chalmers.se/~nad/publications/}.}, +} + +@InProceedings{HurEtAl:Paco, + author = {Hur, Chung-Kil and Neis, Georg and Dreyer, Derek and Vafeiadis, Viktor}, + title = {The power of parameterization in coinductive proof}, + booktitle = {Proceedings of the 40th Annual ACM SIGPLAN-SIGACT Symposium on Principles of Programming Languages, POPL '13}, + editor = {Roberto Giacobazzi and Radhia Cousot}, + pages = {193--206}, + month = jan, + year = {2013}, + publisher = {ACM}, +} + +@InProceedings{BoveDybjerNorell:BriefAgda, + author = {Ana Bove and Peter Dybjer and Ulf Norell}, + title = {A Brief Overview of {A}gda --- A Functional Language with Dependent Types}, + booktitle = {Theorem Proving in Higher Order Logics, 22nd International Conference, TPHOLs 2009}, + editor = {Stefan Berghofer and Tobias Nipkow and Christian Urban and Makarius Wenzel}, + series = lncs, + volume = {5674}, + publisher = {Springer}, + month = aug, + year = {2009}, + pages = {73-78}, +} + 
+@Article{Moore:Piton, + author = {J Strother Moore}, + title = {A Mechanically Verified Language Implementation}, + journal = {Journal of Automated Reasoning}, + year = {1989}, + volume = {5}, + number = {4}, + pages = {461-492}, +} + +@InProceedings{Leroy:ESOP2006, + author = {Xavier Leroy}, + title = {Coinductive Big-Step Operational Semantics}, + booktitle = {Programming Languages and Systems, 15th European Symposium on Programming, ESOP 2006}, + pages = {54-68}, + year = {2006}, + editor = {Peter Sestoft}, + volume = {3924}, + series = lncs, + month = mar, + publisher = {Springer}, +} + +@InProceedings{Leino:ITP2013, + author = {K. Rustan M. Leino}, + title = {Automating Theorem Proving with {SMT}}, + booktitle = {Interactive Theorem Proving --- 4th International Conference, ITP 2013}, + year = {2013}, + editor = {Sandrine Blazy and Christine Paulin-Mohring and David Pichardie}, + volume = {7998}, + series = lncs, + pages = {2-16}, + month = jul, + publisher = {Springer}, +} + +@TechReport{TR-version, + author = {K. Rustan M. Leino and Micha{\l} Moskal}, + title = {Co-induction Simply: Automatic Co-inductive Proofs in a Program Verifier}, + institution = {Microsoft Research}, + year = {2013}, + number = {MSR-TR-2013-49}, + month = may, +} diff -Nru dafny-1.9.5/Docs/DafnyRef/poc.bib dafny-1.9.7/Docs/DafnyRef/poc.bib --- dafny-1.9.5/Docs/DafnyRef/poc.bib 1970-01-01 00:00:00.000000000 +0000 +++ dafny-1.9.7/Docs/DafnyRef/poc.bib 2016-06-05 21:11:14.000000000 +0000 @@ -0,0 +1,523 @@ +@string{lncs = "LNCS"} +@string{lnai = "LNAI"} + +@InCollection{LeinoMoskal:UsableProgramVerification, + author = {K. Rustan M. Leino and Micha{\l} Moskal}, + title = {Usable Auto-Active Verification}, + booktitle = {Usable Verification workshop}, + year = {2010}, + editor = {Tom Ball and Lenore Zuck and N. 
Shankar}, + publisher = {\url{http://fm.csl.sri.com/UV10/}}, +} + booktitle = {UV10 (Usable Verification) workshop}, + month = nov, + +@Book{Coq:book, + author = {Yves Bertot and Pierre Cast{\'e}ran}, + title = {Interactive Theorem Proving and Program Development --- {C}oq'{A}rt: The Calculus of Inductive Constructions}, + publisher = {Springer}, + year = {2004}, +} + series = {Texts in Theoretical Computer Science}, + +@Manual{Isabelle:Guide, + title = {Programming and Proving in {I}sabelle/{HOL}}, + author = {Tobias Nipkow}, + organization = {\url{http://isabelle.informatik.tu-muenchen.de/}}, + year = {2012}, +} + month = may, + +@InProceedings{Leino:Dafny:LPAR16, + author = {K. Rustan M. Leino}, + title = {Dafny: An Automatic Program Verifier for Functional Correctness}, + booktitle = {LPAR-16}, + year = {2010}, + volume = {6355}, + series = lncs, + publisher = {Springer}, + pages = {348-370}, +} + editor = {Edmund M. Clarke and Andrei Voronkov}, + month = apr, + +@InProceedings{VCC:TPHOLs, + author = {Ernie Cohen and Markus Dahlweid and Mark + A. 
Hillebrand and Dirk Leinenbach and Micha{\l} + Moskal and Thomas Santen and Wolfram Schulte and + Stephan Tobies}, + title = {{VCC}: A Practical System for Verifying Concurrent {C}}, + booktitle = {TPHOLs 2009}, + volume = {5674}, + series = lncs, + publisher = {Springer}, + year = {2009}, + pages = {23-42}, +} + booktitle = {Theorem Proving in Higher Order Logics, 22nd International Conference, TPHOLs 2009}, + editor = {Stefan Berghofer and Tobias Nipkow and Christian + Urban and Makarius Wenzel}, + month = aug, + +@TechReport{VeriFast:TR, + author = {Bart Jacobs and Frank Piessens}, + title = {The {VeriFast} program verifier}, + institution = {Department of Computer Science, Katholieke Universiteit Leuven}, + year = {2008}, + number = {CW-520}, +} + month = aug, + +@InProceedings{VeriFast:ProgramsAsProofs, + author = {Bart Jacobs and Jan Smans and Frank Piessens}, + title = {{VeriFast}: Imperative Programs as Proofs}, + booktitle = {VS-Tools workshop at VSTTE 2010}, + year = {2010}, +} + month = aug, + +@Article{IPL:vol53:3, + author = {Roland Backhouse}, + title = {Special issue on The Calculational Method}, + journal = {Information Processing Letters}, + year = {1995}, + volume = {53}, + number = {3}, + pages = {121-172}, +} + month = feb, + +@InProceedings{VonWright:ExtendingWindowInference, + author = {von Wright, Joakim}, + title = {Extending Window Inference}, + booktitle = {TPHOLs'98}, + pages = {17-32}, + year = {1998}, + volume = {1479}, + series = lncs, + publisher = {Springer}, +} + booktitle = {Theorem Proving in Higher Order Logics, 11th International Conference, TPHOLs'98}, + editor = {Jim Grundy and Malcolm C. 
Newey}, + +@PhdThesis{Wenzel:PhD, + author = {Markus Wenzel}, + title = {{I}sabelle/{I}sar --- a versatile environment for human-readable formal proof documents}, + school = {Institut f{\"u}r Informatik, Technische Universit{\"a}t M{\"u}nchen}, + year = {2002}, +} + +@InProceedings{BauerWenzel:IsarExperience, + author = {Gertrud Bauer and Markus Wenzel}, + title = {Calculational reasoning revisited: an {I}sabelle/{I}sar experience}, + booktitle = {TPHOLs 2001}, + pages = {75-90}, + year = {2001}, + volume = {2152}, + series = lncs, + publisher = {Springer}, +} + booktitle = {Theorem Proving in Higher Order Logics, 14th International Conference, TPHOLs 2001}, + editor = {Richard J. Boulton and Paul B. Jackson}, + month = sep, + +@InCollection{KoenigLeino:MOD2011, + author = {Jason Koenig and K. Rustan M. Leino}, + title = {Getting Started with {D}afny: A Guide}, + booktitle = {Software Safety and Security: Tools for Analysis and Verification}, + pages = {152-181}, + publisher = {IOS Press}, + year = {2012}, +} + volume = {33}, + series = {NATO Science for Peace and Security Series D: Information and Communication Security}, + editor = {Tobias Nipkow and Orna Grumberg and Benedikt Hauptmann}, + note = {Summer School Marktoberdorf 2011 lecture notes}, + +@InProceedings{Leino:induction, + author = {K. Rustan M. Leino}, + title = {Automating Induction with an {SMT} Solver}, + booktitle = {VMCAI 2012}, + pages = {315-331}, + year = {2012}, + volume = {7148}, + series = lncs, + publisher = {Springer}, +} + booktitle = {Verification, Model Checking, and Abstract Interpretation - 13th International Conference, VMCAI 2012}, + editor = {Viktor Kuncak and Andrey Rybalchenko}, + month = jan, + +@article{Hoare:AxiomaticBasis, + author = "C. A. R. 
Hoare", + title = "An axiomatic basis for computer programming", + journal = cacm, + volume = 12, + number = 10, + year = 1969, + pages = "576--580,583" +} + month = oct, + +@Article{Simplify:tome, + author = "David Detlefs and Greg Nelson and James B. Saxe", + title = "Simplify: a theorem prover for program checking", + journal = JACM, + volume = 52, + number = 3, + year = 2005, + pages = "365-473", +} + month = may, + +@techreport{Nelson:thesis, + author = "Charles Gregory Nelson", + title = "Techniques for Program Verification", + institution = "Xerox PARC", + year = 1981, + number = "CSL-81-10", + note = "PhD thesis, Stanford University" +} + month = jun, + +@inproceedings{deMouraBjorner:Z3:overview, + author = "de Moura, Leonardo and Nikolaj Bj{\o}rner", + title = {{Z3}: An efficient {SMT} solver}, + booktitle = {Tools and Algorithms for the Construction and + Analysis of Systems, 14th International Conference, + TACAS 2008}, + series = lncs, + volume = 4963, + publisher = {Springer}, + year = 2008, + pages = {337-340}, +} + editor = {C. R. Ramakrishnan and Jakob Rehof}, + month = mar # "--" # apr, + +@InProceedings{LGLM:BVD, + author = {Le Goues, Claire and K. Rustan M. 
Leino and Micha{\l} Moskal}, + title = {The {B}oogie {V}erification {D}ebugger (Tool Paper)}, + booktitle = {Software Engineering and Formal Methods - 9th International Conference, SEFM 2011}, + pages = {407-414}, + year = {2011}, + volume = {7041}, + series = lncs, + publisher = {Springer}, +} + editor = {Gilles Barthe and Alberto Pardo and Gerardo Schneider}, + month = nov, + +@InProceedings{HipSpec:WING, + author = {Koen Claessen and Moa Johansson and Dan Ros{\'e}n and Nicholas Smallbone}, + title = {{HipSpec}: Automating Inductive Proofs of Program Properties}, + booktitle = {Workshop on {A}utomated {T}heory e{X}ploration: {ATX} 2012}, + year = {2012}, +} + month = jul, + +@InProceedings{HipSpec:CADE, + author = {Koen Claessen and Moa Johansson and Dan Ros{\'e}n and Nicholas Smallbone}, + title = {Automating Inductive Proofs Using Theory Exploration}, + booktitle = {CADE-24}, + pages = {392-406}, + year = {2013}, + volume = {7898}, + series = lncs, + publisher = {Springer}, +} + booktitle = {Automated Deduction --- CADE-24 --- 24th International Conference on Automated Deduction}, + editor = {Maria Paola Bonacina}, + month = jun, + +@article{ManoliosMoore:Calc, + author = {Panagiotis Manolios and J. Strother Moore}, + title = {On the desirability of mechanizing calculational proofs}, + journal = {Inf. Process. 
Lett.}, + volume = {77}, + number = {2-4}, + year = {2001}, + pages = {173-179}, + ee = {http://dx.doi.org/10.1016/S0020-0190(00)00200-3}, + bibsource = {DBLP, http://dblp.uni-trier.de} +} + +@article{Lifschitz:DS, + title={On Calculational Proofs}, + author={Vladimir Lifschitz}, + volume={113}, + journal={Annals of Pure and Applied Logic}, + pages={207-224}, + url="http://www.cs.utexas.edu/users/ai-lab/pub-view.php?PubID=26805", + year={2002} +} + +@article{BackGrundyWright:SCP, + author = {Ralph Back and Jim Grundy and Joakim von Wright}, + title = {Structured Calculational Proof}, + journal = {Formal Aspects of Computing}, + year = {1997}, + volume = {9}, + number = {5--6}, + pages = {469--483} +} + +@article{Back:SD, + author = {Back, Ralph-Johan}, + title = {Structured derivations: a unified proof style for teaching mathematics}, + journal = {Formal Aspects of Computing}, + issue_date = {September 2010}, + volume = {22}, + number = {5}, + year = {2010}, + issn = {0934-5043}, + pages = {629--661}, + numpages = {33}, + url = {http://dx.doi.org/10.1007/s00165-009-0136-5}, + doi = {10.1007/s00165-009-0136-5}, + acmid = {1858559}, + publisher = {Springer}, +} + month = sep, + +@article{Dijkstra:EWD1300, + author = {Edsger W. Dijkstra}, + title = {{EWD1300}: The Notational Conventions {I} Adopted, and Why}, + journal = {Formal Asp. Comput.}, + volume = {14}, + number = {2}, + year = {2002}, + pages = {99-107}, + ee = {http://dx.doi.org/10.1007/s001650200030}, + bibsource = {DBLP, http://dblp.uni-trier.de} +} + +@inproceedings{BCDJL05:Boogie, + author = {Michael Barnett and + Bor-Yuh Evan Chang and + Robert DeLine and + Bart Jacobs and + K. Rustan M. 
Leino}, + title = {Boogie: A Modular Reusable Verifier for Object-Oriented + Programs}, + booktitle = {FMCO 2005}, + series = lncs, + volume = 4111, + publisher = "Springer", + year = {2006}, + pages = {364-387}, +} + month = sep, + +@article{BVW:Mathpad, + author = {Roland Backhouse and + Richard Verhoeven and + Olaf Weber}, + title = {Math{$\!\!\int\!\!$}pad: A System for On-Line Preparation of Mathematical + Documents}, + journal = {Software --- Concepts and Tools}, + volume = {18}, + number = {2}, + year = {1997}, + pages = {80-}, + bibsource = {DBLP, http://dblp.uni-trier.de} +} + +@inproceedings{VB:MathpadPVS, + author = {Richard Verhoeven and + Roland Backhouse}, + title = {Interfacing Program Construction and Verification}, + booktitle = {World Congress on Formal Methods}, + year = {1999}, + pages = {1128-1146}, + ee = {http://dx.doi.org/10.1007/3-540-48118-4_10}, + bibsource = {DBLP, http://dblp.uni-trier.de} +} + +@inproceedings{Corbineau:CoqDecl, + author = {Pierre Corbineau}, + title = {A Declarative Language for the {Coq} Proof Assistant}, + booktitle = {TYPES}, + year = {2007}, + pages = {69-84}, + ee = {http://dx.doi.org/10.1007/978-3-540-68103-8_5}, + bibsource = {DBLP, http://dblp.uni-trier.de} +} + +@inproceedings{Wiedijk:Sketches, + author = {Freek Wiedijk}, + title = {Formal Proof Sketches}, + booktitle = {TYPES}, + year = {2003}, + pages = {378-393}, + ee = {http://dx.doi.org/10.1007/978-3-540-24849-1_24}, + crossref = {DBLP:conf/types/2003}, + bibsource = {DBLP, http://dblp.uni-trier.de} +} + +@book{DijkstraScholten:Book, + author = {Edsger W. Dijkstra and + Carel S. 
Scholten}, + title = {Predicate calculus and program semantics}, + publisher = {Springer}, + series = {Texts and monographs in computer science}, + year = {1990}, + isbn = {978-3-540-96957-0}, + pages = {I-X, 1-220}, + bibsource = {DBLP, http://dblp.uni-trier.de} +} + +@inproceedings{Rudnicki:Mizar, + author = {Piotr Rudnicki}, + title = {An Overview of the {MIZAR} Project}, + booktitle = {University of Technology, Bastad}, + year = {1992}, + pages = {311--332}, + publisher = {} +} + +@inproceedings{ORS:PVS, + AUTHOR = {S. Owre and J. M. Rushby and N. Shankar}, + TITLE = {{PVS:} {A} Prototype Verification System}, + BOOKTITLE = {CADE-11}, + YEAR = {1992}, + SERIES = lnai, + VOLUME = {607}, + PAGES = {748--752}, + PUBLISHER = {Springer}, + URL = {http://www.csl.sri.com/papers/cade92-pvs/} +} + BOOKTITLE = {11th International Conference on Automated Deduction (CADE)}, + EDITOR = {Deepak Kapur}, + +@article{Robinson:Window, + author = {Peter J. Robinson and + John Staples}, + title = {Formalizing a Hierarchical Structure of Practical Mathematical + Reasoning}, + journal = {J. Log. Comput.}, + volume = {3}, + number = {1}, + year = {1993}, + pages = {47-61}, + ee = {http://dx.doi.org/10.1093/logcom/3.1.47}, + bibsource = {DBLP, http://dblp.uni-trier.de} +} + +@article{Grundy:WindowHOL, + author = {Jim Grundy}, + title = {Transformational Hierarchical Reasoning}, + journal = {Comput. 
J.}, + volume = {39}, + number = {4}, + year = {1996}, + pages = {291-302}, + ee = {http://dx.doi.org/10.1093/comjnl/39.4.291}, + bibsource = {DBLP, http://dblp.uni-trier.de} +} + +@inproceedings{BN:Sledgehammer, + author = {Sascha B{\"o}hme and + Tobias Nipkow}, + title = {Sledgehammer: Judgement Day}, + booktitle = {IJCAR}, + year = {2010}, + pages = {107-121}, + ee = {http://dx.doi.org/10.1007/978-3-642-14203-1_9}, + bibsource = {DBLP, http://dblp.uni-trier.de} +} + +@inproceedings{Armand:CoqSMT, + author = {Micha{\"e}l Armand and + Germain Faure and + Benjamin Gr{\'e}goire and + Chantal Keller and + Laurent Th{\'e}ry and + Benjamin Werner}, + title = {A Modular Integration of {SAT}/{SMT} Solvers to {Coq} through + Proof Witnesses}, + booktitle = {CPP}, + year = {2011}, + pages = {135-150}, + ee = {http://dx.doi.org/10.1007/978-3-642-25379-9_12}, + bibsource = {DBLP, http://dblp.uni-trier.de} +} + +@inproceedings{Besson:CoqSMTReflexive, + author = {Fr{\'e}d{\'e}ric Besson and + Pierre-Emmanuel Cornilleau and + David Pichardie}, + title = {Modular SMT Proofs for Fast Reflexive Checking Inside Coq}, + booktitle = {CPP}, + year = {2011}, + pages = {151-166}, + ee = {http://dx.doi.org/10.1007/978-3-642-25379-9_13}, + crossref = {DBLP:conf/cpp/2011}, + bibsource = {DBLP, http://dblp.uni-trier.de} +} + +@Book{ACL2:book, + author = {Matt Kaufmann and Panagiotis Manolios and J Strother Moore}, + title = {Computer-Aided Reasoning: An Approach}, + publisher = {Kluwer Academic Publishers}, + year = {2000}, +} + +@inproceedings{boogie11why3, + author = {Fran\c{c}ois Bobot and Jean-Christophe Filli\^atre and Claude March\'e and Andrei Paskevich}, + title = {{Why3}: Shepherd Your Herd of Provers}, + booktitle = {BOOGIE 2011: Workshop on Intermediate Verification Languages}, + year = 2011, + pages = {53--64}, + url = {http://proval.lri.fr/publications/boogie11final.pdf}, +} + booktitle = {BOOGIE 2011: First International Workshop on Intermediate Verification Languages}, + 
month = aug, + +@InProceedings{zeno, + author = {William Sonnex and Sophia Drossopoulou and Susan Eisenbach}, + title = {Zeno: An Automated Prover for Properties of Recursive + Data Structures}, + booktitle = {TACAS}, + volume = {7214}, + series = lncs, + year = {2012}, + publisher = {Springer}, + pages = {407-421}, +} + booktitle = {Tools and Algorithms for the Construction and Analysis of + Systems --- 18th International Conference, TACAS 2012}, + editor = {Cormac Flanagan and Barbara K{\"o}nig}, + month = mar # "--" # apr, + +@InProceedings{Chisholm:CalculationByComputer, + author = {P. Chisholm}, + title = {Calculation by computer}, + booktitle = {Third International Workshop on Software Engineering and its Applications}, + address = {Toulouse, France}, + year = {1990}, + month = dec, + pages = {713-728}, +} + +@TechReport{VanDeSnepscheut:Proxac, + author = {van de Snepscheut, Jan L. A.}, + title = {Proxac: an editor for program transformation}, + institution = {Caltech}, + year = {1993}, + number = {CS-TR-93-33}, +} + +@InProceedings{VanGasterenBijlsma:CalcExtension, + author = {A. J. M. van Gasteren and A. Bijlsma}, + title = {An extension of the program derivation format}, + booktitle = {PROCOMET '98}, + pages = {167-185}, + year = {1998}, + publisher = {IFIP Conference Proceedings}, +} + booktitle = {Programming Concepts and Methods, IFIP TC2/WG2.2,2.3 International Conference on + Programming Concepts and Methods (PROCOMET '98)}, + editor = {David Gries and Willem P. de Roever}, + month = jun, + diff -Nru dafny-1.9.5/Docs/DafnyRef/references.bib dafny-1.9.7/Docs/DafnyRef/references.bib --- dafny-1.9.5/Docs/DafnyRef/references.bib 1970-01-01 00:00:00.000000000 +0000 +++ dafny-1.9.7/Docs/DafnyRef/references.bib 2016-06-05 21:11:14.000000000 +0000 @@ -0,0 +1,1446 @@ +@InCollection{Leino:Dafny:MOD2008, + author = {K. Rustan M. 
Leino}, + title = {Specification and verification of object-oriented software}, + booktitle = {Engineering Methods and Tools for Software Safety and Security}, + pages = {231-266}, + publisher = {IOS Press}, + year = {2009}, + editor = {Manfred Broy and Wassiou Sitou and Tony Hoare}, + volume = {22}, + series = {NATO Science for Peace and Security Series D: Information and Communication Security}, + note = {Summer School Marktoberdorf 2008 lecture notes}, +} + +@InCollection{LeinoSchulte:MOD2006, + author = {K. Rustan M. Leino and Wolfram Schulte}, + title = {A verifying compiler for a multi-threaded object-oriented language}, + booktitle = {Software Safety and Security}, + pages = {351-416}, + publisher = {IOS Press}, + year = {2007}, + editor = {Manfred Broy and Johannes Gr{\"u}nbauer and Tony Hoare}, + volume = {9}, + series = {NATO Science for Peace and Security Series D: Information and Communication Security}, + note = {Summer School Marktoberdorf 2006 lecture notes}, +} + +@techreport{ESC:rr, + author = "David L. Detlefs and K. Rustan M. Leino and Greg Nelson + and James B. Saxe", + title = "Extended static checking", + institution = "Compaq Systems Research Center", + month = dec, + year = 1998, + type = "Research Report", + number = 159 +} + +@Article{Simplify:tome, + author = "David Detlefs and Greg Nelson and James B. Saxe", + title = "Simplify: a theorem prover for program checking", + journal = JACM, + volume = 52, + number = 3, + month = may, + year = 2005, + pages = "365-473", +} + +@InProceedings{Doomed:FM2009, + author = {Jochen Hoenicke and K. Rustan M. 
Leino and Andreas + Podelski and Martin Sch{\"a}f and Thomas Wies}, + title = {It's Doomed; We Can Prove It}, + booktitle = {FM 2009: Formal Methods, Second World Congress}, + editor = {Ana Cavalcanti and Dennis Dams}, + volume = 5850, + series = lncs, + publisher = {Springer}, + month = nov, + year = 2009, + pages = {338-353}, +} + +@InProceedings{Regis-Gianas:Pottier:MPC2008, + author = {Yann R{\'e}gis-Gianas and Fran{\c{c}}ois Pottier}, + title = {A {Hoare} Logic for Call-by-Value Functional Programs}, + booktitle = {Mathematics of Program Construction, 9th International Conference}, + editor = {Philippe Audebaud and Christine Paulin-Mohring}, + volume = {5133}, + series = lncs, + publisher = {Springer}, + month = jul, + year = {2008}, + pages = {305-335}, +} + +@InProceedings{ZeeKuncakRinard:PLDI2008, + author = {Karen Zee and Viktor Kuncak and Martin C. Rinard}, + title = {Full functional verification of linked data structures}, + booktitle = {Proceedings of the ACM SIGPLAN 2008 Conference on + Programming Language Design and Implementation}, + editor = {Rajiv Gupta and Saman P. Amarasinghe}, + year = {2008}, + month = jun, + publisher = {ACM}, + pages = {349-361}, +} + +@InProceedings{VCC:TPHOLs, + author = {Ernie Cohen and Markus Dahlweid and Mark + A. 
Hillebrand and Dirk Leinenbach and Micha{\l} + Moskal and Thomas Santen and Wolfram Schulte and + Stephan Tobies}, + title = {{VCC}: A Practical System for Verifying Concurrent {C}}, + booktitle = {Theorem Proving in Higher Order Logics, 22nd + International Conference, TPHOLs 2009}, + editor = {Stefan Berghofer and Tobias Nipkow and Christian + Urban and Makarius Wenzel}, + volume = {5674}, + series = lncs, + publisher = {Springer}, + year = {2009}, + month = aug, + pages = {23-42}, +} + +@InProceedings{seL4:SOSP2009, + author = {Gerwin Klein and Kevin Elphinstone and Gernot Heiser + and June Andronick and David Cock and Philip Derrin + and Dhammika Elkaduwe and Kai Engelhardt and Rafal + Kolanski and Michael Norrish and Thomas Sewell and + Harvey Tuch and Simon Winwood}, + title = {{seL4}: formal verification of an {OS} kernel}, + booktitle = {Proceedings of the 22nd ACM Symposium on Operating + Systems Principles 2009, SOSP 2009}, + editor = {Jeanna Neefe Matthews and Thomas E. Anderson}, + publisher = {ACM}, + month = oct, + year = {2009}, + pages = {207-220}, +} + +@book{Meyer:OOP, + author = "Bertrand Meyer", + title = "Object-oriented Software Construction", + publisher = "Prentice-Hall International", + series = "Series in Computer Science", + year = 1988 +} + +@InProceedings{SpecSharp:Overview, + author = {Mike Barnett and K. Rustan M. Leino and Wolfram Schulte}, + title = {The {Spec\#} Programming System: An Overview}, + booktitle = {{CASSIS 2004}, Construction and Analysis of Safe, + Secure and Interoperable Smart devices}, + editor = "Gilles Barthe and Lilian Burdy and Marieke Huisman and + Jean-Louis Lanet and Traian Muntean", + series = lncs, + volume = 3362, + publisher = "Springer", + year = 2005, + pages = "49-69" +} + +@InProceedings{Kassios:FM2006, + author = "Ioannis T. 
Kassios", + title = "Dynamic Frames: Support for Framing, Dependencies and Sharing Without Restrictions", + booktitle = "FM 2006: Formal Methods, 14th International Symposium on Formal Methods", + editor = "Jayadev Misra and Tobias Nipkow and Emil Sekerinski", + series = lncs, + volume = 4085, + publisher = "Springer", + month = aug, + year = 2006, + pages = "268-283", +} + +@InProceedings{Boogie:Architecture, + author = "Mike Barnett and Bor-Yuh Evan Chang and Robert DeLine and + Bart Jacobs and K. Rustan M. Leino", + title = "{B}oogie: A Modular Reusable Verifier for Object-Oriented Programs", + booktitle = "Formal Methods for Components and Objects: 4th + International Symposium, FMCO 2005", + editor = "de Boer, Frank S. and Marcello M. Bonsangue and + Susanne Graf and de Roever, Willem-Paul", + series = lncs, + volume = 4111, + publisher = "Springer", + month = sep, + year = 2006, + pages = "364-387" +} + +@Misc{Leino:Boogie2-RefMan, + author = {K. Rustan M. Leino}, + title = {This is {B}oogie 2}, + howpublished = {Manuscript KRML 178}, + year = 2008, + note = "Available at \url{http://research.microsoft.com/~leino/papers.html}", +} + +@inproceedings{deMouraBjorner:Z3:overview, + author = "de Moura, Leonardo and Nikolaj Bj{\o}rner", + title = {{Z3}: An efficient {SMT} solver}, + booktitle = {Tools and Algorithms for the Construction and + Analysis of Systems, 14th International Conference, + TACAS 2008}, + editor = {C. R. Ramakrishnan and Jakob Rehof}, + series = lncs, + volume = 4963, + publisher = {Springer}, + month = mar # "--" # apr, + year = 2008, + pages = {337-340}, +} + +@InProceedings{Gonthier:CAV2006, + author = {Georges Gonthier}, + title = {Verifying the safety of a practical concurrent + garbage collector}, + booktitle = {Computer Aided Verification, 8th International Conference, CAV '96}, + editor = {Rajeev Alur and Thomas A. 
Henzinger}, + volume = {1102}, + series = lncs, + publisher = {Springer}, + month = jul # "--" # aug, + year = {1996}, + pages = {462-465}, +} + +@Article{CLIncStack, + author = {William R. Bevier and Hunt, Jr., Warren A. and + J Strother Moore and William D. Young}, + title = {Special issue on system verification}, + journal = {Journal of Automated Reasoning}, + volume = {5}, + number = {4}, + month = dec, + year = {1989}, + pages = {409-530}, +} + +@InProceedings{ParkinsonBierman:POPL2005, + author = {Matthew J. Parkinson and Gavin M. Bierman}, + title = {Separation logic and abstraction}, + booktitle = {Proceedings of the 32nd ACM SIGPLAN-SIGACT Symposium + on Principles of Programming Languages, POPL 2005}, + publisher = {ACM}, + month = jan, + year = {2005}, + pages = {247-258}, +} + +@InProceedings{Weide:VSTTE2008, + author = {Bruce W. Weide and Murali Sitaraman and Heather + K. Harton and Bruce Adcock and Paolo Bucci and + Derek Bronish and Wayne D. Heym and Jason + Kirschenbaum and David Frazier}, + title = {Incremental Benchmarks for Software Verification Tools and Techniques}, + booktitle = {Verified Software: Theories, Tools, Experiments, + Second International Conference, VSTTE 2008}, + editor = {Natarajan Shankar and Jim Woodcock}, + volume = {5295}, + series = lncs, + publisher = {Springer}, + month = oct, + year = {2008}, + pages = {84-98}, +} + +@Article{SchorrWaite:CACM1967, + author = {H. Schorr and W. M. Waite}, + title = {An Efficient Machine-Independent Procedure for + Garbage Collection in Various List Structures}, + journal = cacm, + volume = {10}, + number = {8}, + month = aug, + year = {1967}, + pages = {501-506}, +} + +@phdthesis{Leino:thesis, + author = "K. Rustan M. Leino", + title = "Toward Reliable Modular Programs", + school = {California Institute of Technology}, + year = 1995, + note = "Technical Report Caltech-CS-TR-95-03." 
+} + +@inproceedings{Boyland:SAS2003, + author = {John Boyland}, + title = {Checking Interference with Fractional Permissions}, + booktitle = "Static Analysis, 10th International Symposium, SAS 2003", + editor = {Radhia Cousot}, + series = lncs, + volume = 2694, + publisher = "Springer", + year = 2003, + pages = {55-72} +} + +@InProceedings{Reynolds:SepLogic, + author = {John C. Reynolds}, + title = {Separation Logic: A Logic for Shared Mutable Data Structures}, + booktitle = {17th IEEE Symposium on Logic in Computer Science (LICS 2002)}, + publisher = {IEEE Computer Society}, + year = {2002}, + month = jul, + pages = {55-74}, +} + +@InProceedings{Clarke-Drossopoulou02, + author = {Dave Clarke and Sophia Drossopoulou}, + title = {Ownership, encapsulation and the disjointness of + type and effect}, + booktitle = {Proceedings of the 2002 ACM SIGPLAN Conference on + Object-Oriented Programming Systems, Languages and + Applications, OOPSLA 2002}, + publisher = {ACM}, + Month = nov, + Year = 2002, + pages = {292--310}, +} + +@InProceedings{FAP:OOPSLA1998, + author = {Dave Clarke and John Potter and James Noble}, + title = {Ownership Types for Flexible Alias Protection}, + booktitle = {Proceedings of the 1998 ACM SIGPLAN Conference on + Object-Oriented Programming Systems, Languages \& + Applications (OOPSLA '98)}, + publisher = {ACM}, + month = oct, + year = {1998}, + pages = {48-64}, +} + +@Article{LeinoNelson:tome, + author = "K. Rustan M. Leino and Greg Nelson", + title = "Data abstraction and information hiding", + journal = toplas, + month = sep, + year = 2002, + volume = 24, + number = 5, + pages = "491-553" +} + +@PhdThesis{Darvas:thesis, + author = {{\'A}d{\'a}m P{\'e}ter Darvas}, + title = {Reasoning About Data Abstraction in Contract Languages}, + school = {ETH Zurich}, + year = {2009}, + note = {Diss. ETH No. 
18622}, +} + +@InProceedings{SmansEtAl:VeriCool, + author = {Jan Smans and Bart Jacobs and Frank Piessens and Wolfram Schulte}, + title = {Automatic Verifier for {J}ava-Like Programs Based on Dynamic Frames}, + booktitle = {Fundamental Approaches to Software Engineering, 11th + International Conference, FASE 2008}, + editor = {Jos{\'e} Luiz Fiadeiro and Paola Inverardi}, + volume = {4961}, + series = lncs, + publisher = {Springer}, + month = mar # "--" # apr, + year = {2008}, + pages = {261-275}, +} + +@inproceedings{Why:Platform, + author = {Jean-Christophe Filli{\^a}tre and Claude March{\'e}}, + title = {The {Why}/{Krakatoa}/{Caduceus} Platform for Deductive Program Verification}, + booktitle = {Computer Aided Verification, 19th International Conference, CAV 2007}, + editor = {Werner Damm and Holger Hermanns}, + volume = {4590}, + series = lncs, + publisher = {Springer}, + month = jul, + year = {2007}, + pages = {173--177} +} + +@InProceedings{BarrettTinelli:CVC3, + author = {Clark Barrett and Cesare Tinelli}, + title = {{CVC3}}, + booktitle = {Computer Aided Verification, 19th International Conference, CAV 2007}, + editor = {Werner Damm and Holger Hermanns}, + volume = {4590}, + series = lncs, + publisher = {Springer}, + month = jul, + year = {2007}, + pages = {298-302}, +} + +@InProceedings{HubertMarche:SchorrWaite, + author = {Thierry Hubert and Claude March{\'e}}, + title = {A case study of {C} source code verification: the + {S}chorr-{W}aite algorithm}, + booktitle = {Third IEEE International Conference on Software + Engineering and Formal Methods (SEFM 2005)}, + editor = {Bernhard K. 
Aichernig and Bernhard Beckert}, + publisher = {IEEE Computer Society }, + month = sep, + year = {2005}, + pages = {190-199}, +} + +@Article{BroyPepper:SchorrWaite, + author = {Manfred Broy and Peter Pepper}, + title = {Combining Algebraic and Algorithmic Reasoning: An + Approach to the {S}chorr-{W}aite Algorithm}, + journal = toplas, + volume = {4}, + number = {3}, + month = jul, + year = {1982}, + pages = {362-381}, +} + +@Article{MehtaNipkow:SchorrWaite, + author = {Farhad Mehta and Tobias Nipkow}, + title = {Proving pointer programs in higher-order logic}, + journal = {Information and Computation}, + year = {2005}, + volume = {199}, + number = {1--2}, + pages = {200-227}, + month = may # "--" # jun, +} + +@InProceedings{Abrial:SchorrWaite, + author = {Jean-Raymond Abrial}, + title = {Event Based Sequential Program Development: + Application to Constructing a Pointer Program}, + booktitle = {FME 2003: Formal Methods, International Symposium of + Formal Methods Europe}, + editor = {Keijiro Araki and Stefania Gnesi and Dino Mandrioli}, + volume = {2805}, + series = lncs, + publisher = {Springer}, + month = sep, + year = {2003}, + pages = {51-74}, +} + +@InCollection{Bubel:SchorrWaite, + author = {Richard Bubel}, + title = {The Schorr-Waite-Algorithm}, + booktitle = {Verification of Object-Oriented Software: The {KeY} Approach}, + crossref = {KeY:book}, + chapter = {15}, +} + +@InProceedings{BanerjeeEtAl:RegionLogic, + author = {Anindya Banerjee and David A. Naumann and Stan Rosenberg}, + title = {Regional Logic for Local Reasoning about Global Invariants}, + booktitle = {ECOOP 2008 --- Object-Oriented Programming, 22nd European Conference}, + editor = {Jan Vitek}, + series = lncs, + volume = 5142, + publisher = {Springer}, + month = jul, + year = 2008, + pages = {387-411}, +} + +@Book{Abrial:BBook, + author = "J.-R. 
Abrial", + title = "The {B}-Book: Assigning Programs to Meanings", + publisher = "Cambridge University Press", + year = 1996 +} + +@Book{Abrial:EventB:book, + author = {Jean-Raymond Abrial}, + title = {Modeling in {Event-B}: System and Software Engineering}, + publisher = {Cambridge University Press}, + year = {2010}, +} + +@Article{MisraCook:Orc, + author = {Jayadev Misra and William R. Cook}, + title = {Computation Orchestration: A Basis for Wide-Area Computing}, + journal = {Software and Systems Modeling}, + year = {2007}, + volume = 6, + number = 1, + pages = {83-110}, + month = mar, +} + +@Book{Jackson:Alloy:book, + author = {Daniel Jackson}, + title = {Software Abstractions: Logic, Language, and Analysis}, + publisher = {MIT Press}, + year = {2006}, +} + +@InProceedings{JacksonEtAl:Formula, + author = {Ethan K. Jackson and Dirk Seifert and Markus + Dahlweid and Thomas Santen and Nikolaj Bj{\o}rner + and Wolfram Schulte}, + title = {Specifying and Composing Non-functional Requirements + in Model-Based Development}, + booktitle = {Proceedings of the 8th International Conference on + Software Composition}, + pages = {72-89}, + year = {2009}, + editor = {Alexandre Bergel and Johan Fabry}, + series = lncs, + volume = {5634}, + month = jul, + publisher = {Springer}, +} + +@InProceedings{HarelEtAl:PlayInPlayOut, + author = {David Harel and Hillel Kugler and Rami Marelly and + Amir Pnueli}, + title = {Smart {P}lay-out of Behavioral Requirements}, + booktitle = {Formal Methods in Computer-Aided Design, 4th + International Conference, FMCAD 2002}, + pages = {378-398}, + year = {2002}, + editor = {Mark Aagaard and John W. O'Leary}, + volume = {2517}, + series = lncs, + month = nov, + publisher = {Springer}, +} + +@Article{Smith:KIDS-overview, + author = "Douglas R. 
Smith",
+  title = "{KIDS}: A Semi-Automatic Program Development System",
+  journal = {IEEE Transactions on Software Engineering },
+  volume = 16,
+  number = 9,
+  month = sep,
+  year = 1990,
+  pages = "1024-1043",
+}
+
+@article{Hoare:DataRepresentations,
+  author = "C. A. R. Hoare",
+  title = "Proof of correctness of data representations",
+  journal = acta,
+  volume = 1,
+  number = 4,
+  year = 1972,
+  pages = "271-281"
+}
+
+@InProceedings{Abrial:FM-in-practice,
+  author = {Jean-Raymond Abrial},
+  title = {Formal methods in industry: achievements, problems, future},
+  booktitle = {28th International Conference on Software Engineering (ICSE 2006)},
+  editor = {Leon J. Osterweil and H. Dieter Rombach and Mary Lou Soffa},
+  month = may,
+  year = {2006},
+  publisher = {ACM},
+  pages = {761-768},
+}
+
+@InProceedings{MartinEtAl:AsynchMIPS,
+  author = {Alain J. Martin and Andrew Lines and Rajit Manohar
+            and Mika Nystr{\"o}m and Paul I. P{\'e}nzes and
+            Robert Southworth and Uri Cummings},
+  title = {The Design of an Asynchronous MIPS R3000 Microprocessor},
+  booktitle = {17th Conference on Advanced Research in VLSI (ARVLSI '97)},
+  month = sep,
+  year = {1997},
+  publisher = {IEEE Computer Society},
+  pages = {164-181},
+}
+
+@InProceedings{BallEtAll:ScalableChecking,
+  author = {Thomas Ball and Brian Hackett and Shuvendu K. Lahiri
+            and Shaz Qadeer and Julien Vanegue},
+  title = {Towards Scalable Modular Checking of User-Defined Properties},
+  booktitle = {Verified Software: Theories, Tools, Experiments,
+               (VSTTE 2010)},
+  editor = {Gary T. Leavens and Peter O'Hearn and Sriram K. 
Rajamani}, + volume = {6217}, + series = lncs, + publisher = {Springer}, + month = aug, + year = {2010}, + pages = {1-24}, +} + +@InProceedings{RegisGianasPottier:FunctionalHoare, + author = {Yann R{\'e}gis-Gianas and Fran{\c{c}}ois Pottier}, + title = {A {H}oare Logic for Call-by-Value Functional Programs}, + booktitle = {Mathematics of Program Construction, 9th International Conference, MPC 2008}, + pages = {305-335}, + year = {2008}, + editor = {Philippe Audebaud and Christine Paulin-Mohring}, + volume = {5133}, + series = lncs, + month = jul, + publisher = {Springer}, +} + +@InProceedings{VeanesEtAl:SpecExplorer, + author = {Margus Veanes and Colin Campbell and Wolfgang + Grieskamp and Wolfram Schulte and Nikolai Tillmann + and Lev Nachmanson}, + title = {Model-Based Testing of Object-Oriented Reactive + Systems with {Spec} {Explorer}}, + booktitle = {Formal Methods and Testing}, + pages = {39-76}, + year = {2008}, + editor = {Robert M. Hierons and Jonathan P. Bowen and Mark Harman}, + volume = {4949}, + series = lncs, + publisher = {Springer}, +} + +@book{Dijkstra:Discipline, + author = "Edsger W. Dijkstra", + title = "A Discipline of Programming", + publisher = "Prentice Hall", + address = "Englewood Cliffs, NJ", + year = 1976 +} + +@InProceedings{LeinoMueller:ESOP2009, + author = {K. Rustan M. Leino and Peter M{\"u}ller}, + title = {A Basis for Verifying Multi-threaded Programs}, + booktitle = {Programming Languages and Systems, 18th European + Symposium on Programming, ESOP 2009}, + editor = {Giuseppe Castagna}, + volume = {5502}, + series = lncs, + publisher = {Springer}, + month = mar, + year = 2009, + pages = {378-393}, +} + +@InProceedings{LeinoRuemmer:Boogie2, + author = {K. Rustan M. 
Leino and Philipp R{\"u}mmer}, + title = {A Polymorphic Intermediate Verification Language: + Design and Logical Encoding}, + booktitle = {Tools and Algorithms for the Construction and + Analysis of Systems, 16th International Conference, + TACAS 2010}, + editor = {Javier Esparza and Rupak Majumdar}, + series = lncs, + volume = 6015, + publisher = {Springer}, + month = mar, + year = 2010, + pages = {312-327}, +} + +@book{LiskovGuttag:book, + author = "Barbara Liskov and John Guttag", + title = "Abstraction and Specification in Program Development", + publisher = "MIT Press", + series = "MIT Electrical Engineering and Computer Science Series", + year = 1986 +} + +@TechReport{DahlEtAl:Simula67, + author = {Ole-Johan Dahl and Bj{\o}rn Myhrhaug and Kristen Nygaard}, + title = {Common Base Language}, + institution = {Norwegian Computing Center}, + type = {Publication}, + number = {S-22}, + month = oct, + year = 1970, +} + +@inproceedings{LeinoMueller:ModelFields, + author = {K. Rustan M. Leino and + Peter M{\"u}ller}, + title = {A Verification Methodology for Model Fields}, + booktitle = "Programming Languages and Systems, 15th European Symposium on Programming, ESOP 2006", + editor = "Peter Sestoft", + series = lncs, + volume = 3924, + publisher = "Springer", + month = mar, + year = 2006, + pages = {115-130}, +} + +@InProceedings{CarterEtAl:UsingPerfectDeveloper, + author = {Gareth Carter and Rosemary Monahan and Joseph M. Morris}, + title = {Software Refinement with {P}erfect {D}eveloper}, + booktitle = {Third IEEE International Conference on Software + Engineering and Formal Methods (SEFM 2005)}, + pages = {363-373}, + editor = {Bernhard K. 
Aichernig and Bernhard Beckert}, + month = sep, + year = {2005}, + publisher = {IEEE Computer Society}, +} + +@InProceedings{Abrial:SchorrWaite, + author = {Jean-Raymond Abrial}, + title = {Event Based Sequential Program Development: + Application to Constructing a Pointer Program}, + booktitle = {FME 2003: Formal Methods, International Symposium of + Formal Methods Europe}, + editor = {Keijiro Araki and Stefania Gnesi and Dino Mandrioli}, + volume = {2805}, + series = lncs, + publisher = {Springer}, + month = sep, + year = {2003}, + pages = {51-74}, +} + +@article{Barnett-etal04, + author = {Mike Barnett and Robert DeLine and Manuel F{\"a}hndrich and + K. Rustan M. Leino and Wolfram Schulte}, + title = {Verification of Object-Oriented Programs with Invariants}, + journal = {Journal of Object Technology}, + volume = 3, + number = 6, + year = 2004, + pages = {27-56}, +} + +@InProceedings{SmansEtAl:ImplicitDynamicFrames, + author = {Jan Smans and Bart Jacobs and Frank Piessens}, + title = {Implicit Dynamic Frames: Combining Dynamic Frames + and Separation Logic}, + booktitle = {ECOOP 2009 --- Object-Oriented Programming, 23rd + European Conference}, + editor = {Sophia Drossopoulou}, + volume = {5653}, + series = lncs, + publisher = {Springer}, + month = jul, + year = {2009}, + pages = {148-172}, +} + +@inproceedings{GriesPrins:Encapsulation, + author = "David Gries and Jan Prins", + title = "A New Notion of Encapsulation", + booktitle = "Proceedings of the {ACM} {SIGPLAN} 85 + Symposium on Language Issues in Programming Environments", + publisher = "ACM", + series = "SIGPLAN Notices 20", + number = 7, + month = jul, + year = 1985, + pages = "131-139" +} + +@InProceedings{YangHawblitzel:Verve, + author = {Jean Yang and Chris Hawblitzel}, + title = {Safe to the last instruction: automated verification of a type-safe operating system}, + booktitle = {Proceedings of the 2010 ACM SIGPLAN Conference on + Programming Language Design and Implementation, PLDI + 2010}, + 
editor = {Benjamin G. Zorn and Alexander Aiken}, + month = jun, + year = {2010}, + publisher = {ACM}, + pages = {99-110}, +} + +@Book{BoyerMoore:book, + author = {Robert S. Boyer and J Strother Moore}, + title = {A Computational Logic}, + publisher = {Academic Press}, + series = {ACM Monograph Series}, + year = {1979}, +} + +@article{HoareWirth:Pascal, + author = "C. A. R. Hoare and N. Wirth", + title = "An axiomatic definition of the programming language {PASCAL}", + journal = acta, + volume = 2, + number = 4, + year = 1973, + pages = "335-355" +} + +@article{Hoare:AxiomaticBasis, + author = "C. A. R. Hoare", + title = "An axiomatic basis for computer programming", + journal = cacm, + volume = 12, + number = 10, + year = 1969, + month = oct, + pages = "576--580,583" +} + +@InProceedings{LeinoMoskal:vacid0-notYetConfirmed, + author = {K. Rustan M. Leino and Micha{\l} Moskal}, + title = {{VACID-0}: {V}erification of {A}mple {C}orrectness + of {I}nvariants of {D}ata-structures, Edition 0}, + booktitle = {VS-Tools & Experiments}, + year = 2010, + editor = {Rajeev Joshi and Tiziana Margaria and Peter + M{\"u}ller and David Naumann and Hongseok Yang}, + series = {VSTTE 2010 Workshop Proceedings}, + publisher = {ETH Zurich Technical Report 676}, + month = aug, +} + +@InCollection{Chalice:tutorial, + author = {K. Rustan M. Leino and Peter M{\"u}ller and Jan Smans}, + title = {Verification of Concurrent Programs with {C}halice}, + booktitle = {Foundations of Security Analysis and Design {V}: {FOSAD} 2007/2008/2009 Tutorial Lectures}, + editor = {Alessandro Aldini and Gilles Barthe and Roberto Gorrieri}, + volume = {5705}, + series = lncs, + publisher = {Springer}, + year = {2009}, + pages = {195-222} +} + +@inproceedings{LeinoMuellerSmans10, + author = {K. Rustan M. 
Leino and Peter M{\"u}ller and Jan Smans}, + title = {Deadlock-Free Channels and Locks}, + booktitle = {Programming Languages and Systems, 19th European Symposium on Programming, ESOP 2010}, + editor = {Andrew D. Gordon}, + volume = {6012}, + series = lncs, + publisher = {Springer}, + month = mar, + year = {2010}, + pages = {407-426} +} + +@Book{BundyEtAl:Rippling, + author = {Alan Bundy and David Basin and Dieter Hutter and Andrew Ireland}, + title = {Rippling: Meta-level Guidance for Mathematical Reasoning}, + publisher = {Cambridge University Press}, + volume = {56}, + series = {Cambridge Tracts in Theoretical Computer Science}, + year = {2005}, +} + +@book{Gries:Science, + author = "David Gries", + title = "The Science of Programming", + publisher = "Springer-Verlag", + series = "Texts and Monographs in Computer Science", + year = 1981 +} + +@Book{DijkstraFeijen:Book, + author = "Edsger W. Dijkstra and W. H. J. Feijen", + title = "A Method of Programming", + publisher = "Addison-Wesley", + month = jul, + year = 1988, +} + +@book{Kaldewaij:Programming, + author = "Anne Kaldewaij", + title = "Programming: The Derivation of Algorithms", + publisher = "Prentice-Hall International", + year = 1990, + series = "Series in Computer Science", +} + +@InProceedings{LeinoMonahan:VSTTE2010, + author = {K. Rustan M. Leino and Rosemary Monahan}, + title = {Dafny Meets the Verification Benchmarks Challenge}, + booktitle = {Verified Software: Theories, Tools, Experiments, + Third International Conference, VSTTE 2010}, + pages = {112-126}, + year = {2010}, + editor = {Gary T. Leavens and Peter W. O'Hearn and Sriram K. Rajamani}, + volume = {6217}, + series = lncs, + month = aug, + publisher = {Springer}, +} + +@InProceedings{VSComp2010:report, + author = {Vladimir Klebanov and Peter M{\"u}ller and Natarajan Shankar and + Gary T. 
Leavens and Valentin W{\"u}stholz and Eyad Alkassar and + Rob Arthan and Derek Bronish and Rod Chapman and Ernie Cohen and + Mark Hillebrand and Bart Jacobs and K. Rustan M. Leino and + Rosemary Monahan and Frank Piessens and Nadia Polikarpova and + Tom Ridge and Jan Smans and Stephan Tobies and Thomas Tuerk and + Mattias Ulbrich and Benjamin Wei{\ss}}, + title = {The 1st Verified Software Competition: Experience Report}, + booktitle = {FM 2011: Formal Methods --- 17th International + Symposium on Formal Methods}, + pages = {154-168}, + year = {2011}, + editor = {Michael Butler and Wolfram Schulte}, + volume = {6664}, + series = lncs, + month = jun, + publisher = {Springer}, +} + +@InProceedings{Leino:Dafny:LPAR16, + author = {K. Rustan M. Leino}, + title = {Dafny: An Automatic Program Verifier for Functional Correctness}, + booktitle = {LPAR-16}, + year = {2010}, + volume = {6355}, + series = lncs, + publisher = {Springer}, + month = apr, + editor = {Edmund M. Clarke and Andrei Voronkov}, + pages = {348-370}, +} + +@book{BackVonWright:Book, + author = "Ralph-Johan Back and von Wright, Joakim", + title = "Refinement Calculus: A Systematic Introduction", + series = "Graduate Texts in Computer Science", + publisher = "Springer-Verlag", + year = 1998 +} + +@Article{BalzerCheathamGreen:1990s, + author = {Robert Balzer and {Cheatham, Jr.}, Thomas E. and Cordell Green}, + title = {Software Technology in the 1990's: Using a New Paradigm}, + journal = {IEEE Computer}, + year = {1983}, + volume = {16}, + number = {11}, + pages = {39-45 }, + month = nov, +} + +@InProceedings{Zloof:QBE, + author = {Mosh{\'e} M. Zloof}, + title = {Query by Example}, + booktitle = {American Federation of Information Processing + Societies: 1975 National Computer Conference}, + pages = {431-438}, + year = {1975}, + month = may, + publisher = {AFIPS Press }, +} + +@InProceedings{HarrisGulwani:PLDI2011, + author = {William R. 
Harris and Sumit Gulwani}, + title = {Spreadsheet table transformations from examples}, + booktitle = {Proceedings of the 32nd ACM SIGPLAN Conference on + Programming Language Design and Implementation, PLDI + 2011}, + pages = {317-328}, + year = {2011}, + editor = {Mary W. Hall and David A. Padua}, + month = jun, + publisher = {ACM}, +} + +@Article{Smith:KIDS-overview, + author = "Douglas R. Smith", + title = "{KIDS}: A Semi-Automatic Program Development System", + journal = {IEEE Transactions on Software Engineering }, + volume = 16, + number = 9, + month = sep, + year = 1990, + pages = "1024-1043", +} + +@Article{RodinToolset, + author = {Jean-Raymond Abrial and Michael Butler and Stefan + Hallerstede and Thai Son Hoang and Farhad Mehta and + Laurent Voisin}, + title = {Rodin: An Open Toolset for Modelling and Reasoning in {Event-B}}, + journal = {International Journal on Software Tools for Technology Transfer}, + year = {2010}, + month = apr, +} + +@Article{Summers:LISP-from-examples, + author = {Phillip D. Summers}, + title = {A Methodology for {LISP} Program Construction from Examples}, + journal = jacm, + year = {1977}, + volume = {24}, + number = {1}, + pages = {161-175}, + month = jan, +} + +@InProceedings{Pex:overview, + author = {Nikolai Tillmann and de Halleux, Jonathan}, + title = {Pex---White Box Test Generation for {.NET}}, + booktitle = {Tests and Proofs, Second International Conference, TAP 2008}, + pages = {134-153}, + year = {2008}, + editor = {Bernhard Beckert and Reiner H{\"a}hnle}, + series = lncs, + volume = {4966}, + month = apr, + publisher = {Springer}, +} + +@InProceedings{GodefroidKlarlundSen:DART, + author = {Patrice Godefroid and Nils Klarlund and Koushik Sen}, + title = {{DART}: directed automated random testing}, + booktitle = {Proceedings of the ACM SIGPLAN 2005 Conference on + Programming Language Design and Implementation}, + pages = {213-223}, + year = {2005}, + editor = {Vivek Sarkar and Mary W. 
Hall}, + month = jun, + publisher = {ACM}, +} + +@PhdThesis{Monahan:thesis, + author = {Rosemary Monahan}, + title = {Data Refinement in Object-Oriented Verification}, + school = {Dublin City University}, + year = {2010}, +} + +@InProceedings{Denali:pldi2002, + author = {Rajeev Joshi and Greg Nelson and Keith H. Randall}, + title = {Denali: A Goal-directed Superoptimizer}, + booktitle = {Proceedings of the 2002 ACM SIGPLAN Conference on + Programming Language Design and Implementation + (PLDI)}, + pages = {304-314}, + year = {2002}, + month = jun, + publisher = {ACM}, +} +@Book{SETL, + author = {J. T. Schwartz and R. B. K. Dewar and E. Dubinsky and E. Schonberg}, + title = {Programming with Sets: An Introduction to {SETL}}, + series = {Texts and Monographs in Computer Science}, + publisher = {Springer}, + year = {1986}, +} + +@InProceedings{KuncakEtAl:PLDI2010, + author = {Viktor Kuncak and Mika{\"e}l Mayer and Ruzica Piskac + and Philippe Suter}, + title = {Complete functional synthesis}, + booktitle = {Proceedings of the 2010 ACM SIGPLAN Conference on + Programming Language Design and Implementation, PLDI + 2010}, + pages = {316-329}, + year = {2010}, + editor = {Benjamin G. Zorn and Alexander Aiken}, + month = jun, + publisher = {ACM}, +} + +@Article{JML:ToolSuite:STTT, + author = {Lilian Burdy and Yoonsik Cheon and David R. Cok and + Michael D. Ernst and Joeseph R. Kiniry and Gary T. Leavens and + K. Rustan M. Leino and Erik Poll}, + title = {An overview of {JML} tools and applications}, + journal = {International Journal on Software Tools + for Technology Transfer}, + volume = 7, + number = 3, + publisher = {Springer}, + month = jun, + year = 2005, + pages = {212-232}, +} + +@InProceedings{Green:ProblemSolving, + author = {Cordell Green}, + title = {Application of Theorem Proving to Problem Solving}, + booktitle = {Proceedings of the 1st International Joint Conference on Artificial Intelligence}, + editor = {Donald E. Walker and Lewis M. 
Norton}, + pages = {219-240}, + year = {1969}, + month = may, + publisher = {William Kaufmann}, +} + +@Article{MannaWaldinger:CACM1971, + author = {Zohar Manna and Richard J. Waldinger}, + title = {Towards automatic program synthesis}, + journal = cacm, + year = {1971}, + volume = {14}, + number = {3}, + pages = {151-165}, + month = mar, +} + +@Article{RichWaters:ProgAppren, + author = {Charles Rich and Richard C. Waters}, + title = {The {P}rogrammer's {A}pprentice: A Research Overview}, + journal = {IEEE Computer}, + year = {1988}, + volume = {21}, + number = {11}, + pages = {10-25}, + month = nov, +} + +@InProceedings{Green:PSI, + author = {Cordell Green}, + title = {The Design of the {PSI} Program Synthesis System}, + booktitle = {Proceedings of the 2nd International Conference on Software Engineering}, + pages = {4-18}, + year = {1976}, + month = oct, + publisher = {IEEE Computer Society}, +} + +@Article{SpecSharp:Retrospective:CACM, + author = {Mike Barnett and Manuel F{\"a}hndrich and + K. Rustan M. Leino and Peter M{\"u}ller and + Wolfram Schulte and Herman Venter}, + title = {Specification and Verification: The {Spec\#} Experience}, + journal = cacm, + volume = {54}, + number = {6}, + pages = {81-91}, + month = jun, + year = 2011, +} + +@article{Filipovic:SepLogicRefinement, + author = {Ivana Filipovi{\'c} and Peter O'Hearn and + Noah Torp-Smith and Hongseok Yang}, + title = {Blaming the client: on data refinement in the presence of pointers}, + journal = {Formal Aspects of Computing}, + volume = {22}, + number = {5}, + month = sep, + year = {2010}, + pages = {547-583}, +} + +@inproceedings{Grandy:JavaRefinement, + author = {Grandy, Holger and Stenzel, Kurt and Reif, Wolfgang}, + title = {A refinement method for {J}ava programs}, + booktitle = {Formal Methods for Open Object-Based Distributed Systems, 9th IFIP WG 6.1 International Conference, FMOODS 2007}, + editor = {Marcello M. 
Bonsangue and Einar Broch Johnsen}, + series = lncs, + number = {4468}, + month = jun, + year = {2007}, + publisher = {Springer}, + pages = {221--235}, +} + +@InCollection{KoenigLeino:MOD2011, + author = {Jason Koenig and K. Rustan M. Leino}, + title = {Getting Started with {D}afny: A Guide}, + booktitle = {Software Safety and Security: Tools for Analysis and Verification}, + pages = {152-181}, + publisher = {IOS Press}, + year = {2012}, + editor = {Tobias Nipkow and Orna Grumberg and Benedikt Hauptmann}, + volume = {33}, + series = {NATO Science for Peace and Security Series D: Information and Communication Security}, + note = {Summer School Marktoberdorf 2011 lecture notes}, +} + +@InProceedings{VonWright:ExtendingWindowInference, + author = {von Wright, Joakim}, + title = {Extending Window Inference}, + booktitle = {Theorem Proving in Higher Order Logics, 11th International Conference, TPHOLs'98}, + pages = {17-32}, + year = {1998}, + editor = {Jim Grundy and Malcolm C. Newey}, + volume = {1479}, + series = lncs, + publisher = {Springer}, +} + +@InProceedings{BauerWenzel:IsarExperience, + author = {Gertrud Bauer and Markus Wenzel}, + title = {Calculational reasoning revisited: an {I}sabelle/{I}sar experience}, + booktitle = {Theorem Proving in Higher Order Logics, 14th International Conference, TPHOLs 2001}, + pages = {75-90}, + year = {2001}, + editor = {Richard J. Boulton and Paul B. Jackson}, + volume = {2152}, + series = lncs, + month = sep, + publisher = {Springer}, +} + +@InProceedings{Leino:induction, + author = {K. Rustan M. Leino}, + title = {Automating Induction with an {SMT} Solver}, + booktitle = {Verification, Model Checking, and Abstract Interpretation --- 13th International Conference, VMCAI 2012}, + pages = {315-331}, + year = {2012}, + editor = {Viktor Kuncak and Andrey Rybalchenko}, + volume = {7148}, + series = lncs, + month = jan, + publisher = {Springer}, +} + +@InProceedings{LGLM:BVD, + author = {Le Goues, Claire and K. Rustan M. 
Leino and Micha{\l} Moskal}, + title = {The {B}oogie {V}erification {D}ebugger (Tool Paper)}, + booktitle = {Software Engineering and Formal Methods --- 9th International Conference, SEFM 2011}, + pages = {407-414}, + year = {2011}, + editor = {Gilles Barthe and Alberto Pardo and Gerardo Schneider}, + volume = {7041}, + series = lncs, + month = nov, + publisher = {Springer}, +} + +@InProceedings{Filliatre:2lines, + author = {Jean-Christophe Filli{\^a}tre}, + title = {Verifying two lines of {C} with {Why3}: an exercise in + program verification}, + booktitle = {Verified Software: Theories, Tools, Experiments --- + 4th International Conference, VSTTE 2012}, + pages = {83-97}, + year = {2012}, + editor = {Rajeev Joshi and Peter M{\"u}ller and Andreas Podelski}, + volume = {7152}, + series = lncs, + month = jan, + publisher = {Springer}, +} + +@InCollection{LeinoMoskal:UsableProgramVerification, + author = {K. Rustan M. Leino and Micha{\l} Moskal}, + title = {Usable Auto-Active Verification}, + booktitle = {UV10 (Usable Verification) workshop}, + year = {2010}, + editor = {Tom Ball and Lenore Zuck and N. Shankar}, + month = nov, + publisher = {\url{http://fm.csl.sri.com/UV10/}}, +} + +@InProceedings{LeinoMonahan:Comprehensions, + author = {K. Rustan M. Leino and Rosemary Monahan}, + title = {Reasoning about Comprehensions with First-Order {SMT} Solvers}, + booktitle = {Proceedings of the 2009 ACM Symposium on Applied Computing (SAC)}, + editor = {Sung Y. Shin and Sascha Ossowski}, + publisher = {ACM}, + month = mar, + year = 2009, + pages = {615-622}, +} + +@TechReport{VeriFast:TR, + author = {Bart Jacobs and Frank Piessens}, + title = {The {VeriFast} program verifier}, + institution = {Department of Computer Science, Katholieke Universiteit Leuven}, + year = {2008}, + number = {CW-520}, + month = aug, +} + +@book{DijkstraScholten:book, + author = "Edsger W. Dijkstra and Carel S. 
Scholten", + title = "Predicate Calculus and Program Semantics", + publisher = "Springer-Verlag", + series = "Texts and Monographs in Computer Science", + year = 1990 +} + +@Book{KeY:book, + author = {Bernhard Beckert and Reiner H{\"a}hnle and Peter H. Schmitt}, + title = {Verification of Object-Oriented Software: The {KeY} Approach}, + volume = 4334, + series = lnai, + publisher = {Springer}, + year = 2007, +} + +@Book{Coq:book, + author = {Yves Bertot and Pierre Cast{\'e}ran}, + title = {Interactive Theorem Proving and Program Development --- {C}oq'{A}rt: The Calculus of Inductive Constructions}, + publisher = {Springer}, + year = {2004}, + series = {Texts in Theoretical Computer Science}, +} + +@Book{ACL2:book, + author = {Matt Kaufmann and Panagiotis Manolios and J Strother Moore}, + title = {Computer-Aided Reasoning: An Approach}, + publisher = {Kluwer Academic Publishers}, + year = {2000}, +} + +@InProceedings{Coq:Coinduction, + author = {Eduardo Gim{\'e}nez}, + title = {An Application of Co-inductive Types in {Coq}: Verification of the Alternating Bit Protocol}, + booktitle = {Types for Proofs and Programs, International Workshop TYPES'95}, + pages = {135-152}, + year = {1996}, + editor = {Stefano Berardi and Mario Coppo}, + volume = 1158, + series = lncs, + publisher = {Springer}, +} + +@InCollection{JacobsRutten:IntroductionCoalgebra, + author = {Bart Jacobs and Jan Rutten}, + title = {An Introduction to (Co)Algebra and (Co)Induction}, + booktitle = {Advanced Topics in Bisimulation and Coinduction}, + editor = {Davide Sangiorgi and Jan Rutten}, + series = {Cambridge Tracts in Theoretical Computer Science}, + number = {52}, + publisher = {Cambridge University Press}, + month = oct, + year = {2011}, + pages = {38-99}, +} + +@InProceedings{SonnexEtAl:Zeno, + author = {William Sonnex and Sophia Drossopoulou and Susan Eisenbach}, + title = {Zeno: An Automated Prover for Properties of Recursive + Data Structures}, + booktitle = {Tools and Algorithms for the 
Construction and Analysis of + Systems --- 18th International Conference, TACAS 2012}, + editor = {Cormac Flanagan and Barbara K{\"o}nig}, + volume = {7214}, + series = lncs, + year = {2012}, + month = mar # "--" # apr, + publisher = {Springer}, + pages = {407-421}, +} + +@InProceedings{JohanssonEtAl:IPT2010, + author = {Moa Johansson and Lucas Dixon and Alan Bundy}, + title = {Case-Analysis for {R}ippling and Inductive Proof}, + booktitle = {Interactive Theorem Proving, First International Conference, ITP 2010}, + editor = {Matt Kaufmann and Lawrence C. Paulson}, + volume = {6172}, + series = lncs, + publisher = {Springer}, + month = jul, + year = {2010}, + pages = {291-306}, +} + +@Article{HatcliffEtAl:BISL, + author = {John Hatcliff and Gary T. Leavens and + K. Rustan M. Leino and Peter M{\"u}ller and Matthew Parkinson}, + title = {Behavioral interface specification languages}, + journal = {ACM Computing Surveys}, + volume = {44}, + number = {3}, + note = {Article 16}, + month = jun, + year = {2012}, +} + +@InProceedings{BoehmeNipkow:Sledgehammer, + author = {Sascha B{\"o}hme and Tobias Nipkow}, + title = {Sledgehammer: {J}udgement {D}ay}, + booktitle = {Automated Reasoning, 5th International Joint Conference, IJCAR 2010}, + editor = {J{\"u}rgen Giesl and Reiner H{\"a}hnle}, + year = {2010}, + pages = {107-121}, + volume = {6173}, + series = lncs, + month = jul, + publisher = {Springer}, +} + +@InProceedings{Dafny:LASER2011, + author = {Luke Herbert and K. Rustan M. 
Leino and Jose Quaresma}, + title = {Using {Dafny}, an Automatic Program Verifier}, + booktitle = {Tools for Practical Software Verification, {LASER}, International Summer School 2011}, + editor = {Bertrand Meyer and Martin Nordio}, + volume = {7682}, + series = lncs, + year = {2012}, + pages = {156-181}, + publisher = {Springer}, +} + +@Article{Leroy:CompCert:CACM, + author = {Xavier Leroy}, + title = {Formal verification of a realistic compiler}, + journal = cacm, + volume = {52}, + number = {7}, + year = {2009}, + pages = {107-115}, +} + +@InProceedings{Leino:ITP2013, + author = {K. Rustan M. Leino}, + title = {Automating Theorem Proving with {SMT}}, + booktitle = {Interactive Theorem Proving --- 4th International Conference, ITP 2013}, + year = {2013}, + editor = {Sandrine Blazy and Christine Paulin-Mohring and David Pichardie}, + volume = {7998}, + series = lncs, + pages = {2-16}, + month = jul, + publisher = {Springer}, +} + +@techreport{Nelson:thesis, + author = "Charles Gregory Nelson", + title = "Techniques for Program Verification", + institution = "Xerox PARC", + month = jun, + year = 1981, + number = "CSL-81-10", + note = "The author's PhD thesis" +} + +@InProceedings{LernerMillsteinChambers:VerifiedOptimizations, + author = {Sorin Lerner and Todd Millstein and Craig Chambers}, + title = {Automatically proving the correctness of compiler optimizations}, + booktitle = {Proceedings of the ACM SIGPLAN 2003 Conference on + Programming Language Design and Implementation 2003}, + year = {2003}, + editor = {Ron Cytron and Rajiv Gupta}, + pages = {220-231}, + month = jun, + publisher = {ACM}, +} + +@InProceedings{BoyerHunt:ACL2, + author = {Robert S. 
Boyer and Hunt, Jr., Warren A.}, + title = {Function Memoization and Unique Object Representation for {ACL2} Functions}, + booktitle = {Proceedings of the Sixth International Workshop on + the ACL2 Theorem Prover and its Applications, ACL2 2006}, + editor = {Panagiotis Manolios and Matthew Wilding}, + month = aug, + year = {2006}, + pages = {81--89}, + publisher = {ACM}, +} + +@inproceedings{LeinoWuestholz:DafnyIDE, + author = {K. Rustan M. Leino and + Valentin W{\"{u}}stholz}, + title = {The {D}afny Integrated Development Environment}, + booktitle = {Proceedings 1st Workshop on Formal Integrated Development Environment, + {F-IDE} 2014}, + month = apr, + year = {2014}, + pages = {3--15}, + editor = {Catherine Dubois and + Dimitra Giannakopoulou and + Dominique M{\'{e}}ry}, + series = {{EPTCS}}, + volume = {149}, +} + +@inproceedings{BarnettLeino:Weakest, + author = {Mike Barnett and K. Rustan M. Leino}, + title = {Weakest-precondition of unstructured programs}, + booktitle = {Proceedings of the 2005 ACM SIGPLAN-SIGSOFT Workshop on + Program Analysis For Software Tools and Engineering, + PASTE'05}, + editor = {Michael D. Ernst and Thomas P. 
Jensen}, + month = sep, + year = {2005}, + pages = {82-87}, + publisher = {ACM}, +} diff -Nru dafny-1.9.5/.hgignore dafny-1.9.7/.hgignore --- dafny-1.9.5/.hgignore 2015-05-11 08:03:26.000000000 +0000 +++ dafny-1.9.7/.hgignore 2016-06-05 21:11:14.000000000 +0000 @@ -1,11 +1,19 @@ syntax: regexp ^Source/.*\.(user|suo|cache|vs10x)$ -^Binaries/.*\.(dll|pdb|exe|manifest|config|smt2|vsix|vsixmanifest|bpl|pkgdef)$ +^Binaries/.*\.(dll|pdb|exe|manifest|config|smt2|vsix|vsixmanifest|bpl|pkgdef|xml)$ ^Source/.*\.(smt2|bpl)$ ^.*(bin|obj)/([^/]*/)?(Debug|Release|Checked|Debug All|DEBUG ALL)/.*$ Test/.*/Output Test/desktop/.* Test/([^/]*)/([^/]*)\.sx +^Test/sandbox/.* +^Test/.*\.csv +Package/.* +Test/.*/flycheck_.* +.*\.orig +Test/.*\.bpl +Test/.*/axiom-profiler.html +Test/.*/z3.log syntax: glob *.exe *.pdb @@ -13,3 +21,6 @@ *.tmp *.tmp.dfy Source/DafnyExtension/DafnyRuntime.cs +Source/DafnyExtension/Z3-LICENSE.txt +Test/failing.lst +packages/* diff -Nru dafny-1.9.5/INSTALL dafny-1.9.7/INSTALL --- dafny-1.9.5/INSTALL 1970-01-01 00:00:00.000000000 +0000 +++ dafny-1.9.7/INSTALL 2016-06-05 21:11:14.000000000 +0000 @@ -0,0 +1,57 @@ +Building on Linux +================= + +Tested on a fresh Linux Mint 17.2. Note that we now have official releases for +Linux, so these instructions mostly apply to people interested in looking at +Dafny's sources. + +0. Dependencies + + apt install mono-devel g++ + +1. Create an empty base directory + + mkdir BASE-DIRECTORY + cd BASE-DIRECTORY + +2. Download and build Boogie: + + git clone https://github.com/boogie-org/boogie + cd boogie + mozroots --import --sync + wget https://nuget.org/nuget.exe + mono ./nuget.exe restore Source/Boogie.sln + xbuild Source/Boogie.sln + cd .. + +3. Download and build Dafny: + + hg clone https://hg.codeplex.com/dafny + cd dafny/Source/ + xbuild Dafny.sln + +4. 
Download and unpack z3 (Dafny looks for `z3` in Binaries/z3/bin/) + + cd BASE-DIRECTORY + wget https://github.com/Z3Prover/z3/releases/download/z3-4.4.0/z3-4.4.0-x64-ubuntu-14.04.zip + unzip z3-4.4.0-x64-ubuntu-14.04.zip + mv z3-4.4.0-x64-ubuntu-14.04 dafny/Binaries/z3 + +5. (Optional) If you plan to use Boogie directly, copy (or symlink) the z3 binary so that Boogie, too, can find it: + + cd BASE-DIRECTORY + rm -f boogie/Binaries/z3.exe + cp dafny/Binaries/z3/bin/z3 boogie/Binaries/z3.exe + +6. Run Dafny using the `dafny` shell script in the Binaries directory (it calls mono as appropriate) + +Editing in Emacs +================ + +The README at https://github.com/boogie-org/boogie-friends has plenty of +information on how to set-up Emacs to work with Dafny. In short, it boils down +to running [M-x package-install RET boogie-friends RET] and adding the following +to your .emacs: + (setq flycheck-dafny-executable "BASE-DIRECTORY/dafny/Binaries/dafny") + +Do look at the README, though! It's full of useful tips. diff -Nru dafny-1.9.5/LICENSE dafny-1.9.7/LICENSE --- dafny-1.9.5/LICENSE 1970-01-01 00:00:00.000000000 +0000 +++ dafny-1.9.7/LICENSE 2016-06-05 21:11:14.000000000 +0000 @@ -0,0 +1,22 @@ +Microsoft Public License (MS-PL) + +This license governs use of the accompanying software. If you use the software, you +accept this license. If you do not accept the license, do not use the software. + +1. Definitions +The terms "reproduce," "reproduction," "derivative works," and "distribution" have the +same meaning here as under U.S. copyright law. +A "contribution" is the original software, or any additions or changes to the software. +A "contributor" is any person that distributes its contribution under this license. +"Licensed patents" are a contributor's patent claims that read directly on its contribution. + +2. 
Grant of Rights +(A) Copyright Grant- Subject to the terms of this license, including the license conditions and limitations in section 3, each contributor grants you a non-exclusive, worldwide, royalty-free copyright license to reproduce its contribution, prepare derivative works of its contribution, and distribute its contribution or any derivative works that you create. +(B) Patent Grant- Subject to the terms of this license, including the license conditions and limitations in section 3, each contributor grants you a non-exclusive, worldwide, royalty-free license under its licensed patents to make, have made, use, sell, offer for sale, import, and/or otherwise dispose of its contribution in the software or derivative works of the contribution in the software. + +3. Conditions and Limitations +(A) No Trademark License- This license does not grant you rights to use any contributors' name, logo, or trademarks. +(B) If you bring a patent claim against any contributor over patents that you claim are infringed by the software, your patent license from such contributor to the software ends automatically. +(C) If you distribute any portion of the software, you must retain all copyright, patent, trademark, and attribution notices that are present in the software. +(D) If you distribute any portion of the software in source code form, you may do so only under this license by including a complete copy of this license with your distribution. If you distribute any portion of the software in compiled or object code form, you may only do so under a license that complies with this license. +(E) The software is licensed "as-is." You bear the risk of using it. The contributors give no express warranties, guarantees or conditions. You may have additional consumer rights under your local laws which this license cannot change. 
To the extent permitted under your local laws, the contributors exclude the implied warranties of merchantability, fitness for a particular purpose and non-infringement. diff -Nru dafny-1.9.5/package.py dafny-1.9.7/package.py --- dafny-1.9.5/package.py 1970-01-01 00:00:00.000000000 +0000 +++ dafny-1.9.7/package.py 2016-06-05 21:11:14.000000000 +0000 @@ -0,0 +1,219 @@ +#!/usr/bin/env python3 + +from fnmatch import fnmatch +from os import path +import argparse +import json +import os +import re +import subprocess +import sys +import time +import urllib.request +import zipfile + +# Configuration + +## Where do we fetch the list of releases from? +RELEASES_URL = "https://api.github.com/repos/Z3Prover/z3/releases/latest" +## How do we extract info from the name of a release file? +RELEASE_REGEXP = re.compile(r"^(?Pz3-[0-9\.]+-(?Px86|x64)-(?P[a-z0-9\.\-]+)).zip$", re.IGNORECASE) + +## Where are the sources? +SOURCE_DIRECTORY = "Source" +## Where do the binaries get put? +BINARIES_DIRECTORY = "Binaries" +## Where do we store the built packages and cache files? +DESTINATION_DIRECTORY = "Package" + +## What's the root folder of the archive? +DAFNY_PACKAGE_PREFIX = path.join("dafny") +## What sub-folder of the packages does z3 go into? +Z3_PACKAGE_PREFIX = path.join("z3") + +## What do we take from the z3 archive? (Glob syntax) +Z3_INTERESTING_FILES = ["LICENSE.txt", "bin/*"] + +## On unix systems, which Dafny files should be marked as executable? (Glob syntax; Z3's permissions are preserved) +UNIX_EXECUTABLES = ["dafny", "dafny-server"] + +## What do we take from Dafny's Binaries folder? 
+DLLs = ["AbsInt", + "Basetypes", + "CodeContractsExtender", + "Concurrency", + "Core", + "DafnyPipeline", + "Doomed", + "ExecutionEngine", + "Graph", + "Houdini", + "Model", + "ModelViewer", + "ParserHelper", + "Provers.SMTLib", + "VCExpr", + "VCGeneration"] +EXEs = ["Dafny", "DafnyServer"] +ETCs = UNIX_EXECUTABLES + ["DafnyPrelude.bpl", "DafnyRuntime.cs", "DafnyLanguageService.vsix"] + +# Constants + +THIS_FILE = path.realpath(__file__) +ROOT_DIRECTORY = path.dirname(THIS_FILE) +SOURCE_DIRECTORY = path.join(ROOT_DIRECTORY, SOURCE_DIRECTORY) +BINARIES_DIRECTORY = path.join(ROOT_DIRECTORY, BINARIES_DIRECTORY) +DESTINATION_DIRECTORY = path.join(ROOT_DIRECTORY, DESTINATION_DIRECTORY) +CACHE_DIRECTORY = path.join(DESTINATION_DIRECTORY, "cache") + +MONO = sys.platform not in ("win32", "cygwin") +DLL_PDB_EXT = ".dll.mdb" if MONO else ".pdb" +EXE_PDB_EXT = ".exe.mdb" if MONO else ".pdb" +ARCHIVE_FNAMES = ([dll + ".dll" for dll in DLLs] + [dll + DLL_PDB_EXT for dll in DLLs] + + [exe + ".exe" for exe in EXEs] + [exe + EXE_PDB_EXT for exe in EXEs] + + ETCs) + +# Code + +def flush(*args, **kwargs): + print(*args, **kwargs) + sys.stdout.flush() + +class Release: + @staticmethod + def parse_zip_name(name): + m = RELEASE_REGEXP.match(name) + if not m: + raise Exception("{} does not match RELEASE_REGEXP".format(name)) + return m.group('platform'), m.group('os'), m.group("directory") + + def __init__(self, js, version): + self.z3_name = js["name"] + self.size = js["size"] + self.url = js["browser_download_url"] + self.platform, self.os, self.directory = Release.parse_zip_name(js["name"]) + self.z3_zip = path.join(CACHE_DIRECTORY, self.z3_name) + self.dafny_name = "dafny-{}-{}-{}.zip".format(version, self.platform, self.os) + self.dafny_zip = path.join(DESTINATION_DIRECTORY, self.dafny_name) + + @property + def cached(self): + return path.exists(self.z3_zip) and path.getsize(self.z3_zip) == self.size + + @property + def MB(self): + return self.size / 1e6 + + def download(self): + 
if self.cached: + print("cached!") + else: + flush("downloading {:.2f}MB...".format(self.MB), end=' ') + with urllib.request.urlopen(self.url) as reader: + with open(self.z3_zip, mode="wb") as writer: + writer.write(reader.read()) + flush("done!") + + @staticmethod + def zipify_path(fpath): + """Zip entries always use '/' as the path separator.""" + return fpath.replace(os.path.sep, '/') + + def pack(self): + try: + os.remove(self.dafny_zip) + except FileNotFoundError: + pass + missing = [] + with zipfile.ZipFile(self.dafny_zip, 'w', zipfile.ZIP_DEFLATED) as archive: + with zipfile.ZipFile(self.z3_zip) as Z3_archive: + z3_files_count = 0 + for fileinfo in Z3_archive.infolist(): + fname = path.relpath(fileinfo.filename, self.directory) + if any(fnmatch(fname, pattern) for pattern in Z3_INTERESTING_FILES): + z3_files_count += 1 + contents = Z3_archive.read(fileinfo) + fileinfo.filename = Release.zipify_path(path.join(DAFNY_PACKAGE_PREFIX, Z3_PACKAGE_PREFIX, fname)) + archive.writestr(fileinfo, contents) + for fname in ARCHIVE_FNAMES: + fpath = path.join(BINARIES_DIRECTORY, fname) + if path.exists(fpath): + fileinfo = zipfile.ZipInfo(fname, time.localtime(os.stat(fpath).st_mtime)[:6]) + if any(fnmatch(fname, pattern) for pattern in UNIX_EXECUTABLES): + # http://stackoverflow.com/questions/434641/ + fileinfo.external_attr = 0o777 << 16 + contents = open(fpath, mode='rb').read() + fileinfo.compress_type = zipfile.ZIP_DEFLATED + fileinfo.filename = Release.zipify_path(path.join(DAFNY_PACKAGE_PREFIX, fname)) + archive.writestr(fileinfo, contents) + else: + missing.append(fname) + flush("done! 
(imported {} files from z3's sources)".format(z3_files_count)) + if missing: + flush(" WARNING: Not all files were found: {} were missing".format(", ".join(missing))) + +def discover(version): + flush(" - Getting information about latest release") + with urllib.request.urlopen(RELEASES_URL) as reader: + js = json.loads(reader.read().decode("utf-8")) + + for release_js in js["assets"]: + release = Release(release_js, version) + if release.platform == "x64": + flush(" + Selecting {} ({:.2f}MB, {})".format(release.z3_name, release.MB, release.size)) + yield release + else: + flush(" + Rejecting {}".format(release.z3_name)) + +def download(releases): + flush(" - Downloading {} z3 archives".format(len(releases))) + for release in releases: + flush(" + {}:".format(release.z3_name), end=' ') + release.download() + +def run(cmd): + flush(" + {}...".format(" ".join(cmd)), end=' ') + retv = subprocess.call(cmd, stdout=subprocess.DEVNULL, stderr=subprocess.DEVNULL) + if retv != 0: + flush("failed! (Is Dafny or the Dafny server running?)") + sys.exit(1) + else: + flush("done!") + +def build(): + os.chdir(ROOT_DIRECTORY) + flush(" - Building") + builder = "xbuild" if MONO else "msbuild" + try: + run([builder, "Source/Dafny.sln", "/p:Configuration=Checked", "/p:Platform=Any CPU", "/t:Clean"]) + run([builder, "Source/Dafny.sln", "/p:Configuration=Checked", "/p:Platform=Any CPU", "/t:Rebuild"]) + except FileNotFoundError: + flush("Could not find '{}'! On Windows, you need to run this from the VS native tools command prompt.".format(builder)) + sys.exit(1) + +def pack(releases): + flush(" - Packaging {} Dafny archives".format(len(releases))) + for release in releases: + flush(" + {}:".format(release.dafny_name), end=' ') + release.pack() + +def parse_arguments(): + parser = argparse.ArgumentParser(description="Prepare a Dafny release. 
Configuration is hardcoded; edit the `# Configuration' section of this script to change it.") + parser.add_argument("version", help="Version number for this release") + return parser.parse_args() + +def main(): + args = parse_arguments() + os.makedirs(CACHE_DIRECTORY, exist_ok=True) + + # Z3 + flush("* Finding and downloading Z3 releases") + releases = list(discover(args.version)) + download(releases) + + flush("* Building and packaging Dafny") + build() + pack(releases) + +if __name__ == '__main__': + main() diff -Nru dafny-1.9.5/Source/Dafny/BigIntegerParser.cs dafny-1.9.7/Source/Dafny/BigIntegerParser.cs --- dafny-1.9.5/Source/Dafny/BigIntegerParser.cs 1970-01-01 00:00:00.000000000 +0000 +++ dafny-1.9.7/Source/Dafny/BigIntegerParser.cs 2016-06-05 21:11:14.000000000 +0000 @@ -0,0 +1,27 @@ +using System; +using System.Numerics; +using System.Globalization; + +namespace Microsoft.Dafny { + internal static class BigIntegerParser { + /// + /// Mono does not support the BigInteger.TryParse method. In practice, + /// we seldom actually need to parse huge integers, so it makes sense + /// to support most real-life cases by simply trying to parse using + /// Int64, and only falling back if needed. 
+ /// + internal static BigInteger Parse(string str, NumberStyles style) { + UInt64 parsed; + if (UInt64.TryParse(str, style, NumberFormatInfo.CurrentInfo, out parsed)) { + return new BigInteger(parsed); + } else { + // Throws on Mono 3.2.8 + return BigInteger.Parse(str, style); + } + } + + internal static BigInteger Parse(string str) { + return BigIntegerParser.Parse(str, NumberStyles.Integer); + } + } +} \ No newline at end of file diff -Nru dafny-1.9.5/Source/Dafny/Cloner.cs dafny-1.9.7/Source/Dafny/Cloner.cs --- dafny-1.9.5/Source/Dafny/Cloner.cs 2015-05-11 08:03:26.000000000 +0000 +++ dafny-1.9.7/Source/Dafny/Cloner.cs 2016-06-05 21:11:14.000000000 +0000 @@ -17,12 +17,18 @@ if (m is DefaultModuleDecl) { nw = new DefaultModuleDecl(); } else { - nw = new ModuleDefinition(Tok(m.tok), name, m.IsAbstract, m.IsFacade, m.RefinementBaseName, m.Module, CloneAttributes(m.Attributes), true); + nw = new ModuleDefinition(Tok(m.tok), name, m.IsAbstract, m.IsFacade, m.IsExclusiveRefinement, m.RefinementBaseName, m.Module, CloneAttributes(m.Attributes), true); } foreach (var d in m.TopLevelDecls) { nw.TopLevelDecls.Add(CloneDeclaration(d, nw)); } - nw.RefinementBase = m.RefinementBase; + if (null != m.RefinementBase) { + nw.RefinementBase = m.RefinementBase; + } + if (null != m.RefinementBaseSig) { + nw.RefinementBaseSig = m.RefinementBaseSig; + } + nw.ClonedFrom = m; nw.Height = m.Height; return nw; } @@ -33,17 +39,17 @@ if (d is OpaqueTypeDecl) { var dd = (OpaqueTypeDecl)d; - return new OpaqueTypeDecl(Tok(dd.tok), dd.Name, m, dd.EqualitySupport, dd.TypeArgs.ConvertAll(CloneTypeParam), CloneAttributes(dd.Attributes)); + return new OpaqueTypeDecl(Tok(dd.tok), dd.Name, m, dd.EqualitySupport, dd.TypeArgs.ConvertAll(CloneTypeParam), CloneAttributes(dd.Attributes), d); } else if (d is TypeSynonymDecl) { var dd = (TypeSynonymDecl)d; var tps = dd.TypeArgs.ConvertAll(CloneTypeParam); - return new TypeSynonymDecl(Tok(dd.tok), dd.Name, tps, m, CloneType(dd.Rhs), 
CloneAttributes(dd.Attributes)); + return new TypeSynonymDecl(Tok(dd.tok), dd.Name, tps, m, CloneType(dd.Rhs), CloneAttributes(dd.Attributes), dd); } else if (d is NewtypeDecl) { var dd = (NewtypeDecl)d; if (dd.Var == null) { - return new NewtypeDecl(Tok(dd.tok), dd.Name, m, CloneType(dd.BaseType), CloneAttributes(dd.Attributes)); + return new NewtypeDecl(Tok(dd.tok), dd.Name, m, CloneType(dd.BaseType), CloneAttributes(dd.Attributes), dd); } else { - return new NewtypeDecl(Tok(dd.tok), dd.Name, m, CloneBoundVar(dd.Var), CloneExpr(dd.Constraint), CloneAttributes(dd.Attributes)); + return new NewtypeDecl(Tok(dd.tok), dd.Name, m, CloneBoundVar(dd.Var), CloneExpr(dd.Constraint), CloneAttributes(dd.Attributes), dd); } } else if (d is TupleTypeDecl) { var dd = (TupleTypeDecl)d; @@ -52,13 +58,13 @@ var dd = (IndDatatypeDecl)d; var tps = dd.TypeArgs.ConvertAll(CloneTypeParam); var ctors = dd.Ctors.ConvertAll(CloneCtor); - var dt = new IndDatatypeDecl(Tok(dd.tok), dd.Name, m, tps, ctors, CloneAttributes(dd.Attributes)); + var dt = new IndDatatypeDecl(Tok(dd.tok), dd.Name, m, tps, ctors, CloneAttributes(dd.Attributes), dd); return dt; } else if (d is CoDatatypeDecl) { var dd = (CoDatatypeDecl)d; var tps = dd.TypeArgs.ConvertAll(CloneTypeParam); var ctors = dd.Ctors.ConvertAll(CloneCtor); - var dt = new CoDatatypeDecl(Tok(dd.tok), dd.Name, m, tps, ctors, CloneAttributes(dd.Attributes)); + var dt = new CoDatatypeDecl(Tok(dd.tok), dd.Name, m, tps, ctors, CloneAttributes(dd.Attributes), dd); return dt; } else if (d is IteratorDecl) { var dd = (IteratorDecl)d; @@ -94,7 +100,7 @@ var dd = (TraitDecl)d; var tps = dd.TypeArgs.ConvertAll(CloneTypeParam); var mm = dd.Members.ConvertAll(CloneMember); - var cl = new TraitDecl(Tok(dd.tok), dd.Name, m, tps, mm, CloneAttributes(dd.Attributes)); + var cl = new TraitDecl(Tok(dd.tok), dd.Name, m, tps, mm, CloneAttributes(dd.Attributes), dd); return cl; } } @@ -103,9 +109,9 @@ var tps = dd.TypeArgs.ConvertAll(CloneTypeParam); var mm = 
dd.Members.ConvertAll(CloneMember); if (d is DefaultClassDecl) { - return new DefaultClassDecl(m, mm); + return new DefaultClassDecl(m, mm, ((DefaultClassDecl)d)); } else { - return new ClassDecl(Tok(dd.tok), dd.Name, m, tps, mm, CloneAttributes(dd.Attributes), dd.TraitsTyp.ConvertAll(CloneType)); + return new ClassDecl(Tok(dd.tok), dd.Name, m, tps, mm, CloneAttributes(dd.Attributes), dd.TraitsTyp.ConvertAll(CloneType), dd); } } else if (d is ModuleDecl) { if (d is LiteralModuleDecl) { @@ -123,6 +129,11 @@ abs.Signature = a.Signature; abs.OriginalSignature = a.OriginalSignature; return abs; + } else if (d is ModuleExportDecl) { + var a = (ModuleExportDecl)d; + var export = new ModuleExportDecl(a.tok, m, a.IsDefault, a.Exports, a.Extends); + export.Signature = a.Signature; + return export; } else { Contract.Assert(false); // unexpected declaration return null; // to please compiler @@ -134,11 +145,11 @@ } public DatatypeCtor CloneCtor(DatatypeCtor ct) { - return new DatatypeCtor(Tok(ct.tok), ct.Name, ct.Formals.ConvertAll(CloneFormal), CloneAttributes(ct.Attributes)); + return new DatatypeCtor(Tok(ct.tok), ct.Name, ct.Formals.ConvertAll(CloneFormal), CloneAttributes(ct.Attributes), ct); } public TypeParameter CloneTypeParam(TypeParameter tp) { - return new TypeParameter(Tok(tp.tok), tp.Name, tp.EqualitySupport); + return new TypeParameter(Tok(tp.tok), tp.Name, tp.EqualitySupport, tp); } public MemberDecl CloneMember(MemberDecl member) { @@ -160,7 +171,7 @@ return t; } else if (t is SetType) { var tt = (SetType)t; - return new SetType(CloneType(tt.Arg)); + return new SetType(tt.Finite, CloneType(tt.Arg)); } else if (t is SeqType) { var tt = (SeqType)t; return new SeqType(CloneType(tt.Arg)); @@ -187,7 +198,7 @@ return new InferredTypeProxy(); } else if (t is OperationTypeProxy) { var p = (OperationTypeProxy)t; - return new OperationTypeProxy(p.AllowInts, p.AllowReals, p.AllowChar, p.AllowSeq, p.AllowSetVarieties); + return new OperationTypeProxy(p.AllowInts, 
p.AllowReals, p.AllowChar, p.AllowSeq, p.AllowSetVarieties, p.AllowISet); } else if (t is ParamTypeProxy) { return new ParamTypeProxy(CloneTypeParam(((ParamTypeProxy)t).orig)); } else { @@ -225,6 +236,9 @@ public Attributes CloneAttributes(Attributes attrs) { if (attrs == null) { return null; + } else if (attrs.Name.StartsWith("_")) { + // skip this attribute, since it would have been produced during resolution + return CloneAttributes(attrs.Prev); } else { return new Attributes(attrs.Name, attrs.Args.ConvertAll(CloneExpr), CloneAttributes(attrs.Prev)); } @@ -243,7 +257,7 @@ var e = (LiteralExpr)expr; if (e is StaticReceiverExpr) { var ee = (StaticReceiverExpr)e; - return new StaticReceiverExpr(e.tok, CloneType(ee.UnresolvedType)); + return new StaticReceiverExpr(e.tok, CloneType(ee.UnresolvedType), ee.IsImplicit); } else if (e.Value == null) { return new LiteralExpr(Tok(e.tok)); } else if (e.Value is bool) { @@ -277,7 +291,7 @@ } else if (expr is DisplayExpression) { DisplayExpression e = (DisplayExpression)expr; if (expr is SetDisplayExpr) { - return new SetDisplayExpr(Tok(e.tok), e.Elements.ConvertAll(CloneExpr)); + return new SetDisplayExpr(Tok(e.tok), ((SetDisplayExpr)expr).Finite, e.Elements.ConvertAll(CloneExpr)); } else if (expr is MultiSetDisplayExpr) { return new MultiSetDisplayExpr(Tok(e.tok), e.Elements.ConvertAll(CloneExpr)); } else { @@ -294,15 +308,13 @@ return new MapDisplayExpr(Tok(expr.tok), e.Finite, pp); } else if (expr is NameSegment) { - var e = (NameSegment)expr; - return new NameSegment(Tok(e.tok), e.Name, e.OptTypeArguments == null ? null : e.OptTypeArguments.ConvertAll(CloneType)); + return CloneNameSegment(expr); } else if (expr is ExprDotName) { var e = (ExprDotName)expr; return new ExprDotName(Tok(e.tok), CloneExpr(e.Lhs), e.SuffixName, e.OptTypeArguments == null ? 
null : e.OptTypeArguments.ConvertAll(CloneType)); } else if (expr is ApplySuffix) { - var e = (ApplySuffix)expr; - return new ApplySuffix(Tok(e.tok), CloneExpr(e.Lhs), e.Args.ConvertAll(CloneExpr)); - + var e = (ApplySuffix) expr; + return CloneApplySuffix(e); } else if (expr is MemberSelectExpr) { var e = (MemberSelectExpr)expr; return new MemberSelectExpr(Tok(e.tok), CloneExpr(e.Obj), e.MemberName); @@ -319,6 +331,10 @@ var e = (SeqUpdateExpr)expr; return new SeqUpdateExpr(Tok(e.tok), CloneExpr(e.Seq), CloneExpr(e.Index), CloneExpr(e.Value)); + } else if (expr is DatatypeUpdateExpr) { + var e = (DatatypeUpdateExpr)expr; + return new DatatypeUpdateExpr(Tok(e.tok), CloneExpr(e.Root), e.Updates.ConvertAll(t => Tuple.Create(Tok(t.Item1), t.Item2, CloneExpr(t.Item3)))); + } else if (expr is FunctionCallExpr) { var e = (FunctionCallExpr)expr; return new FunctionCallExpr(Tok(e.tok), e.Name, CloneExpr(e.Receiver), e.OpenParen == null ? null : Tok(e.OpenParen), e.Args.ConvertAll(CloneExpr)); @@ -385,7 +401,8 @@ return new LambdaExpr(tk, l.OneShot, bvs, range, l.Reads.ConvertAll(CloneFrameExpr), term); } else { Contract.Assert(e is SetComprehension); - return new SetComprehension(tk, bvs, range, term, CloneAttributes(e.Attributes)); + var tt = (SetComprehension)e; + return new SetComprehension(tk, tt.Finite, bvs, range, term, CloneAttributes(e.Attributes)); } } else if (expr is WildcardExpr) { @@ -411,7 +428,7 @@ } else if (expr is MatchExpr) { var e = (MatchExpr)expr; return new MatchExpr(Tok(e.tok), CloneExpr(e.Source), - e.Cases.ConvertAll(c => new MatchCaseExpr(Tok(c.tok), c.Id, c.Arguments.ConvertAll(CloneBoundVar), CloneExpr(c.Body))), e.UsesOptionalBraces); + e.Cases.ConvertAll(CloneMatchCaseExpr), e.UsesOptionalBraces); } else if (expr is NegationExpression) { var e = (NegationExpression)expr; @@ -422,6 +439,22 @@ } } + public MatchCaseExpr CloneMatchCaseExpr(MatchCaseExpr c) { + Contract.Requires(c != null); + if (c.Arguments != null) { + 
Contract.Assert(c.CasePatterns == null); + return new MatchCaseExpr(Tok(c.tok), c.Id, c.Arguments.ConvertAll(CloneBoundVar), CloneExpr(c.Body)); + } else { + Contract.Assert(c.Arguments == null); + Contract.Assert(c.CasePatterns != null); + return new MatchCaseExpr(Tok(c.tok), c.Id, c.CasePatterns.ConvertAll(CloneCasePattern), CloneExpr(c.Body)); + } + } + + public virtual Expression CloneApplySuffix(ApplySuffix e) { + return new ApplySuffix(Tok(e.tok), CloneExpr(e.Lhs), e.Args.ConvertAll(CloneExpr)); + } + public virtual CasePattern CloneCasePattern(CasePattern pat) { Contract.Requires(pat != null); if (pat.Var != null) { @@ -433,6 +466,11 @@ } } + public virtual NameSegment CloneNameSegment(Expression expr) { + var e = (NameSegment)expr; + return new NameSegment(Tok(e.tok), e.Name, e.OptTypeArguments == null ? null : e.OptTypeArguments.ConvertAll(CloneType)); + } + public virtual AssignmentRhs CloneRHS(AssignmentRhs rhs) { AssignmentRhs c; if (rhs is ExprRhs) { @@ -505,7 +543,7 @@ } else if (stmt is IfStmt) { var s = (IfStmt)stmt; - r = new IfStmt(Tok(s.Tok), Tok(s.EndTok), CloneExpr(s.Guard), CloneBlockStmt(s.Thn), CloneStmt(s.Els)); + r = new IfStmt(Tok(s.Tok), Tok(s.EndTok), s.IsExistentialGuard, CloneExpr(s.Guard), CloneBlockStmt(s.Thn), CloneStmt(s.Els)); } else if (stmt is AlternativeStmt) { var s = (AlternativeStmt)stmt; @@ -522,15 +560,25 @@ } else if (stmt is ForallStmt) { var s = (ForallStmt)stmt; r = new ForallStmt(Tok(s.Tok), Tok(s.EndTok), s.BoundVars.ConvertAll(CloneBoundVar), null, CloneExpr(s.Range), s.Ens.ConvertAll(CloneMayBeFreeExpr), CloneStmt(s.Body)); - + if (s.ForallExpressions != null) { + ((ForallStmt)r).ForallExpressions = s.ForallExpressions.ConvertAll(CloneExpr); + } } else if (stmt is CalcStmt) { - var s = (CalcStmt)stmt; - r = new CalcStmt(Tok(s.Tok), Tok(s.EndTok), CloneCalcOp(s.Op), s.Lines.ConvertAll(CloneExpr), s.Hints.ConvertAll(CloneBlockStmt), s.StepOps.ConvertAll(CloneCalcOp), CloneCalcOp(s.ResultOp)); + var s = 
(CalcStmt)stmt; + // calc statements have the unusual property that the last line is duplicated. If that is the case (which + // we expect it to be here), we share the clone of that line as well. + var lineCount = s.Lines.Count; + var lines = new List(lineCount); + for (int i = 0; i < lineCount; i++) { + lines.Add(i == lineCount - 1 && 2 <= lineCount && s.Lines[i] == s.Lines[i - 1] ? lines[i - 1] : CloneExpr(s.Lines[i])); + } + Contract.Assert(lines.Count == lineCount); + r = new CalcStmt(Tok(s.Tok), Tok(s.EndTok), CloneCalcOp(s.Op), lines, s.Hints.ConvertAll(CloneBlockStmt), s.StepOps.ConvertAll(CloneCalcOp), CloneCalcOp(s.ResultOp), CloneAttributes(s.Attributes)); } else if (stmt is MatchStmt) { var s = (MatchStmt)stmt; r = new MatchStmt(Tok(s.Tok), Tok(s.EndTok), CloneExpr(s.Source), - s.Cases.ConvertAll(c => new MatchCaseStmt(Tok(c.tok), c.Id, c.Arguments.ConvertAll(CloneBoundVar), c.Body.ConvertAll(CloneStmt))), s.UsesOptionalBraces); + s.Cases.ConvertAll(CloneMatchCaseStmt), s.UsesOptionalBraces); } else if (stmt is AssignSuchThatStmt) { var s = (AssignSuchThatStmt)stmt; @@ -545,6 +593,10 @@ var lhss = s.Locals.ConvertAll(c => new LocalVariable(Tok(c.Tok), Tok(c.EndTok), c.Name, CloneType(c.OptionalType), c.IsGhost)); r = new VarDeclStmt(Tok(s.Tok), Tok(s.EndTok), lhss, (ConcreteUpdateStatement)CloneStmt(s.Update)); + } else if (stmt is LetStmt) { + var s = (LetStmt) stmt; + r = new LetStmt(Tok(s.Tok), Tok(s.EndTok), s.LHSs.ConvertAll(CloneCasePattern), s.RHSs.ConvertAll(CloneExpr)); + } else if (stmt is ModifyStmt) { var s = (ModifyStmt)stmt; var mod = CloneSpecFrameExpr(s.Mod); @@ -562,6 +614,18 @@ return r; } + public MatchCaseStmt CloneMatchCaseStmt(MatchCaseStmt c) { + Contract.Requires(c != null); + if (c.Arguments != null) { + Contract.Assert(c.CasePatterns == null); + return new MatchCaseStmt(Tok(c.tok), c.Id, c.Arguments.ConvertAll(CloneBoundVar), c.Body.ConvertAll(CloneStmt)); + } else { + Contract.Assert(c.Arguments == null); + 
Contract.Assert(c.CasePatterns != null); + return new MatchCaseStmt(Tok(c.tok), c.Id, c.CasePatterns.ConvertAll(CloneCasePattern), c.Body.ConvertAll(CloneStmt)); + } + } + public CalcStmt.CalcOp CloneCalcOp(CalcStmt.CalcOp op) { if (op is CalcStmt.BinaryCalcOp) { return new CalcStmt.BinaryCalcOp(((CalcStmt.BinaryCalcOp) op).Op); @@ -585,7 +649,7 @@ } public GuardedAlternative CloneGuardedAlternative(GuardedAlternative alt) { - return new GuardedAlternative(Tok(alt.Tok), CloneExpr(alt.Guard), alt.Body.ConvertAll(CloneStmt)); + return new GuardedAlternative(Tok(alt.Tok), alt.IsExistentialGuard, CloneExpr(alt.Guard), alt.Body.ConvertAll(CloneStmt)); } public Function CloneFunction(Function f, string newName = null) { @@ -603,16 +667,16 @@ if (f is Predicate) { return new Predicate(Tok(f.tok), newName, f.HasStaticKeyword, f.IsProtected, f.IsGhost, tps, formals, - req, reads, ens, decreases, body, Predicate.BodyOriginKind.OriginalOrInherited, CloneAttributes(f.Attributes), null); + req, reads, ens, decreases, body, Predicate.BodyOriginKind.OriginalOrInherited, CloneAttributes(f.Attributes), null, f); } else if (f is InductivePredicate) { return new InductivePredicate(Tok(f.tok), newName, f.HasStaticKeyword, f.IsProtected, tps, formals, - req, reads, ens, body, CloneAttributes(f.Attributes), null); + req, reads, ens, body, CloneAttributes(f.Attributes), null, f); } else if (f is CoPredicate) { return new CoPredicate(Tok(f.tok), newName, f.HasStaticKeyword, f.IsProtected, tps, formals, - req, reads, ens, body, CloneAttributes(f.Attributes), null); + req, reads, ens, body, CloneAttributes(f.Attributes), null, f); } else { return new Function(Tok(f.tok), newName, f.HasStaticKeyword, f.IsProtected, f.IsGhost, tps, formals, CloneType(f.ResultType), - req, reads, ens, decreases, body, CloneAttributes(f.Attributes), null); + req, reads, ens, decreases, body, CloneAttributes(f.Attributes), null, f); } } @@ -630,19 +694,19 @@ var body = CloneBlockStmt(m.Body); if (m is 
Constructor) { return new Constructor(Tok(m.tok), m.Name, tps, ins, - req, mod, ens, decreases, body, CloneAttributes(m.Attributes), null); + req, mod, ens, decreases, body, CloneAttributes(m.Attributes), null, m); } else if (m is InductiveLemma) { return new InductiveLemma(Tok(m.tok), m.Name, m.HasStaticKeyword, tps, ins, m.Outs.ConvertAll(CloneFormal), - req, mod, ens, decreases, body, CloneAttributes(m.Attributes), null); + req, mod, ens, decreases, body, CloneAttributes(m.Attributes), null, m); } else if (m is CoLemma) { return new CoLemma(Tok(m.tok), m.Name, m.HasStaticKeyword, tps, ins, m.Outs.ConvertAll(CloneFormal), - req, mod, ens, decreases, body, CloneAttributes(m.Attributes), null); + req, mod, ens, decreases, body, CloneAttributes(m.Attributes), null, m); } else if (m is Lemma) { return new Lemma(Tok(m.tok), m.Name, m.HasStaticKeyword, tps, ins, m.Outs.ConvertAll(CloneFormal), - req, mod, ens, decreases, body, CloneAttributes(m.Attributes), null); + req, mod, ens, decreases, body, CloneAttributes(m.Attributes), null, m); } else { return new Method(Tok(m.tok), m.Name, m.HasStaticKeyword, m.IsGhost, tps, ins, m.Outs.ConvertAll(CloneFormal), - req, mod, ens, decreases, body, CloneAttributes(m.Attributes), null); + req, mod, ens, decreases, body, CloneAttributes(m.Attributes), null, m); } } public virtual IToken Tok(IToken tok) { @@ -668,21 +732,27 @@ abstract class FixpointCloner : Cloner { protected readonly Expression k; - readonly Resolver resolver; - readonly string suffix; - protected FixpointCloner(Expression k, Resolver resolver) + protected readonly ErrorReporter reporter; + protected readonly string suffix; + protected FixpointCloner(Expression k, ErrorReporter reporter) { Contract.Requires(k != null); - Contract.Requires(resolver != null); + Contract.Requires(reporter != null); this.k = k; - this.resolver = resolver; + this.reporter = reporter; this.suffix = string.Format("#[{0}]", Printer.ExprToString(k)); } - protected void 
ReportAdditionalInformation(IToken tok, string s) - { - Contract.Requires(tok != null); - Contract.Requires(s != null); - resolver.ReportAdditionalInformation(tok, s + suffix, s.Length); + protected Expression CloneCallAndAddK(FunctionCallExpr e) { + Contract.Requires(e != null); + var receiver = CloneExpr(e.Receiver); + var args = new List(); + args.Add(k); + foreach (var arg in e.Args) { + args.Add(CloneExpr(arg)); + } + var fexp = new FunctionCallExpr(Tok(e.tok), e.Name + "#", receiver, e.OpenParen, args); + reporter.Info(MessageSource.Cloner, e.tok, e.Name + suffix); + return fexp; } } @@ -693,17 +763,18 @@ /// precondition (resp. postcondition) of the inductive lemma's (resp. colemma's) corresponding prefix lemma. /// It is assumed that the source expression has been resolved. Note, the "k" given to the constructor /// is not cloned with each use; it is simply used as is. + /// The resulting expression needs to be resolved by the caller. /// class FixpointLemmaSpecificationSubstituter : FixpointCloner { readonly bool isCoContext; readonly ISet friendlyCalls; - public FixpointLemmaSpecificationSubstituter(ISet friendlyCalls, Expression k, Resolver resolver, bool isCoContext) - : base(k, resolver) + public FixpointLemmaSpecificationSubstituter(ISet friendlyCalls, Expression k, ErrorReporter reporter, bool isCoContext) + : base(k, reporter) { Contract.Requires(friendlyCalls != null); Contract.Requires(k != null); - Contract.Requires(resolver != null); + Contract.Requires(reporter != null); this.isCoContext = isCoContext; this.friendlyCalls = friendlyCalls; } @@ -716,15 +787,7 @@ } else if (expr is FunctionCallExpr) { var e = (FunctionCallExpr)expr; if (friendlyCalls.Contains(e)) { - var receiver = CloneExpr(e.Receiver); - var args = new List(); - args.Add(k); - foreach (var arg in e.Args) { - args.Add(CloneExpr(arg)); - } - var fexp = new FunctionCallExpr(Tok(e.tok), e.Name + "#", receiver, e.OpenParen, args); - ReportAdditionalInformation(e.tok, e.Name); - 
return fexp; + return CloneCallAndAddK(e); } } else if (expr is BinaryExpr && isCoContext) { var e = (BinaryExpr)expr; @@ -734,7 +797,7 @@ var B = CloneExpr(e.E1); var teq = new TernaryExpr(Tok(e.tok), op, k, A, B); var opString = op == TernaryExpr.Opcode.PrefixEqOp ? "==" : "!="; - ReportAdditionalInformation(e.tok, opString); + reporter.Info(MessageSource.Cloner, e.tok, opString + suffix); return teq; } } @@ -763,19 +826,77 @@ } /// - /// The task of the FixpointLemmaBodyCloner is to fill in the implicit _k-1 arguments in recursive inductive/co-lemma calls. + /// The task of the FixpointLemmaBodyCloner is to fill in the implicit _k-1 arguments in recursive inductive/co-lemma calls + /// and in calls to the focal predicates. /// The source statement and the given "k" are assumed to have been resolved. /// class FixpointLemmaBodyCloner : FixpointCloner { readonly FixpointLemma context; - public FixpointLemmaBodyCloner(FixpointLemma context, Expression k, Resolver resolver) - : base(k, resolver) + readonly ISet focalPredicates; + public FixpointLemmaBodyCloner(FixpointLemma context, Expression k, ISet focalPredicates, ErrorReporter reporter) + : base(k, reporter) { Contract.Requires(context != null); Contract.Requires(k != null); - Contract.Requires(resolver != null); + Contract.Requires(reporter != null); this.context = context; + this.focalPredicates = focalPredicates; + } + public override Expression CloneExpr(Expression expr) { + if (DafnyOptions.O.RewriteFocalPredicates) { + if (expr is FunctionCallExpr) { + var e = (FunctionCallExpr)expr; +#if DEBUG_PRINT + if (e.Function.Name.EndsWith("#") && Contract.Exists(focalPredicates, p => e.Function.Name == p.Name + "#")) { + Console.WriteLine("{0}({1},{2}): DEBUG: Possible opportunity to rely on new rewrite: {3}", e.tok.filename, e.tok.line, e.tok.col, Printer.ExprToString(e)); + } +#endif + // Note, we don't actually ever get here, because all calls will have been parsed as ApplySuffix. 
+ // However, if something changes in the future (for example, some rewrite that changing an ApplySuffix + // to its resolved FunctionCallExpr), then we do want this code, so with the hope of preventing + // some error in the future, this case is included. (Of course, it is currently completely untested!) + var f = e.Function as FixpointPredicate; + if (f != null && focalPredicates.Contains(f)) { +#if DEBUG_PRINT + var r = CloneCallAndAddK(e); + Console.WriteLine("{0}({1},{2}): DEBUG: Rewrote extreme predicate into prefix predicate: {3}", e.tok.filename, e.tok.line, e.tok.col, Printer.ExprToString(r)); + return r; +#else + return CloneCallAndAddK(e); +#endif + } + } else if (expr is ApplySuffix) { + var apply = (ApplySuffix)expr; + if (!apply.WasResolved()) { + // Since we're assuming the enclosing statement to have been resolved, this ApplySuffix must + // be part of an ExprRhs that actually designates a method call. Such an ApplySuffix does + // not get listed as being resolved, but its components (like its .Lhs) are resolved. 
+ var mse = (MemberSelectExpr)apply.Lhs.Resolved; + Contract.Assume(mse.Member is Method); + } else { + var fce = apply.Resolved as FunctionCallExpr; + if (fce != null) { +#if DEBUG_PRINT + if (fce.Function.Name.EndsWith("#") && Contract.Exists(focalPredicates, p => fce.Function.Name == p.Name + "#")) { + Console.WriteLine("{0}({1},{2}): DEBUG: Possible opportunity to rely on new rewrite: {3}", fce.tok.filename, fce.tok.line, fce.tok.col, Printer.ExprToString(fce)); + } +#endif + var f = fce.Function as FixpointPredicate; + if (f != null && focalPredicates.Contains(f)) { +#if DEBUG_PRINT + var r = CloneCallAndAddK(fce); + Console.WriteLine("{0}({1},{2}): DEBUG: Rewrote extreme predicate into prefix predicate: {3}", fce.tok.filename, fce.tok.line, fce.tok.col, Printer.ExprToString(r)); + return r; +#else + return CloneCallAndAddK(fce); +#endif + } + } + } + } + } + return base.CloneExpr(expr); } public override AssignmentRhs CloneRHS(AssignmentRhs rhs) { var r = rhs as ExprRhs; @@ -799,7 +920,7 @@ apply.Args.ForEach(arg => args.Add(CloneExpr(arg))); var applyClone = new ApplySuffix(Tok(apply.tok), lhsClone, args); var c = new ExprRhs(applyClone); - ReportAdditionalInformation(apply.tok, mse.Member.Name); + reporter.Info(MessageSource.Cloner, apply.Lhs.tok, mse.Member.Name + suffix); return c; } } diff -Nru dafny-1.9.5/Source/Dafny/Compiler.cs dafny-1.9.7/Source/Dafny/Compiler.cs --- dafny-1.9.5/Source/Dafny/Compiler.cs 2015-05-11 08:03:26.000000000 +0000 +++ dafny-1.9.7/Source/Dafny/Compiler.cs 2016-06-05 21:11:14.000000000 +0000 @@ -14,18 +14,11 @@ namespace Microsoft.Dafny { public class Compiler { - public Compiler(TextWriter wr) { - Contract.Requires(wr != null); - this.wr = wr; + public Compiler() { + } - [ContractInvariantMethod] - void ObjectInvariant() - { - Contract.Invariant(wr!=null); - } - - TextWriter wr; + StringBuilder copyInstrWriter = new StringBuilder(); // a buffer that stores copy instructions generated by letExpr that uses out param. 
Method enclosingMethod; // non-null when a method body is being translated FreshIdGenerator idGenerator = new FreshIdGenerator(); @@ -50,7 +43,7 @@ public int ErrorCount; public TextWriter ErrorWriter = Console.Out; - void Error(string msg, params object[] args) { + void Error(string msg, TextWriter wr, params object[] args) { Contract.Requires(msg != null); Contract.Requires(args != null); @@ -60,7 +53,7 @@ ErrorCount++; } - void ReadRuntimeSystem() { + void ReadRuntimeSystem(TextWriter wr) { string codebase = cce.NonNull( System.IO.Path.GetDirectoryName(cce.NonNull(System.Reflection.Assembly.GetExecutingAssembly().Location))); string path = System.IO.Path.Combine(codebase, "DafnyRuntime.cs"); using (TextReader rd = new StreamReader(new FileStream(path, System.IO.FileMode.Open, System.IO.FileAccess.Read))) @@ -75,16 +68,16 @@ } readonly int IndentAmount = 2; - void Indent(int ind) { + void Indent(int ind, TextWriter wr) { Contract.Requires(0 <= ind); string spaces = " "; for (; spaces.Length < ind; ind -= spaces.Length) { wr.Write(spaces); } - wr.Write(spaces.Substring(0, ind)); + wr.Write(spaces.Substring(0, ind)); } - public void Compile(Program program) { + public void Compile(Program program, TextWriter wr) { Contract.Requires(program != null); wr.WriteLine("// Dafny program {0} compiled into C#", program.Name); wr.WriteLine("// To recompile, use 'csc' with: /r:System.Numerics.dll"); @@ -92,8 +85,8 @@ wr.WriteLine("// You might also want to include compiler switches like:"); wr.WriteLine("// /debug /nowarn:0164 /nowarn:0219"); wr.WriteLine(); - ReadRuntimeSystem(); - CompileBuiltIns(program.BuiltIns); + ReadRuntimeSystem(wr); + CompileBuiltIns(program.BuiltIns, wr); foreach (ModuleDefinition m in program.CompileModules) { if (m.IsAbstract) { @@ -102,7 +95,11 @@ } int indent = 0; if (!m.IsDefaultModule) { - wr.WriteLine("namespace @{0} {{", m.CompileName); + var m_prime = m; + while (DafnyOptions.O.IronDafny && m_prime.ClonedFrom != null) { + m_prime = 
m.ClonedFrom; + } + wr.WriteLine("namespace @{0} {{", m_prime.CompileName); indent += IndentAmount; } foreach (TopLevelDecl d in m.TopLevelDecls) { @@ -113,21 +110,33 @@ wr.WriteLine(); if (d is OpaqueTypeDecl) { var at = (OpaqueTypeDecl)d; - Error("Opaque type ('{0}') cannot be compiled", at.FullName); + Error("Opaque type ('{0}') cannot be compiled", wr, at.FullName); } else if (d is TypeSynonymDecl) { // do nothing, just bypass type synonyms in the compiler } else if (d is NewtypeDecl) { - // do nothing, just bypass newtypes in the compiler + var nt = (NewtypeDecl)d; + Indent(indent, wr); + wr.WriteLine("public class @{0} {{", nt.CompileName); + if (nt.NativeType != null) { + Indent(indent + IndentAmount, wr); + wr.WriteLine("public static System.Collections.Generic.IEnumerable<{0}> IntegerRange(BigInteger lo, BigInteger hi) {{", nt.NativeType.Name); + Indent(indent + 2 * IndentAmount, wr); + wr.WriteLine("for (var j = lo; j < hi; j++) {{ yield return ({0})j; }}", nt.NativeType.Name); + Indent(indent + IndentAmount, wr); + wr.WriteLine("}"); + } + Indent(indent, wr); + wr.WriteLine("}"); } else if (d is DatatypeDecl) { var dt = (DatatypeDecl)d; - Indent(indent); + Indent(indent, wr); wr.Write("public abstract class Base_{0}", dt.CompileName); if (dt.TypeArgs.Count != 0) { wr.Write("<{0}>", TypeParameters(dt.TypeArgs)); } wr.WriteLine(" { }"); - CompileDatatypeConstructors(dt, indent); - CompileDatatypeStruct(dt, indent); + CompileDatatypeConstructors(dt, indent, wr); + CompileDatatypeStruct(dt, indent, wr); } else if (d is IteratorDecl) { var iter = (IteratorDecl)d; // An iterator is compiled as follows: @@ -153,7 +162,7 @@ // } // } - Indent(indent); + Indent(indent, wr); wr.Write("public class @{0}", iter.CompileName); if (iter.TypeArgs.Count != 0) { wr.Write("<{0}>", TypeParameters(iter.TypeArgs)); @@ -165,58 +174,58 @@ foreach (var member in iter.Members) { var f = member as Field; if (f != null && !f.IsGhost) { - Indent(ind); - wr.WriteLine("public {0} @{1} 
= {2};", TypeName(f.Type), f.CompileName, DefaultValue(f.Type)); + Indent(ind, wr); + wr.WriteLine("public {0} @{1} = {2};", TypeName(f.Type, wr), f.CompileName, DefaultValue(f.Type, wr)); } else if (member is Constructor) { Contract.Assert(ct == null); // we're expecting just one constructor ct = (Constructor)member; } } Contract.Assert(ct != null); // we do expect a constructor - Indent(ind); wr.WriteLine("System.Collections.Generic.IEnumerator __iter;"); + Indent(ind, wr); wr.WriteLine("System.Collections.Generic.IEnumerator __iter;"); // here's the initializer method - Indent(ind); wr.Write("public void @{0}(", ct.CompileName); + Indent(ind, wr); wr.Write("public void @{0}(", ct.CompileName); string sep = ""; foreach (var p in ct.Ins) { if (!p.IsGhost) { // here we rely on the parameters and the corresponding fields having the same names - wr.Write("{0}{1} @{2}", sep, TypeName(p.Type), p.CompileName); + wr.Write("{0}{1} @{2}", sep, TypeName(p.Type, wr), p.CompileName); sep = ", "; } } wr.WriteLine(") {"); foreach (var p in ct.Ins) { if (!p.IsGhost) { - Indent(ind + IndentAmount); + Indent(ind + IndentAmount, wr); wr.WriteLine("this.@{0} = @{0};", p.CompileName); } } - Indent(ind + IndentAmount); wr.WriteLine("__iter = TheIterator();"); - Indent(ind); wr.WriteLine("}"); + Indent(ind + IndentAmount, wr); wr.WriteLine("__iter = TheIterator();"); + Indent(ind, wr); wr.WriteLine("}"); // here are the enumerator methods - Indent(ind); wr.WriteLine("public void MoveNext(out bool more) { more = __iter.MoveNext(); }"); - Indent(ind); wr.WriteLine("private System.Collections.Generic.IEnumerator TheIterator() {"); + Indent(ind, wr); wr.WriteLine("public void MoveNext(out bool more) { more = __iter.MoveNext(); }"); + Indent(ind, wr); wr.WriteLine("private System.Collections.Generic.IEnumerator TheIterator() {"); if (iter.Body == null) { - Error("Iterator {0} has no body", iter.FullName); + Error("Iterator {0} has no body", wr, iter.FullName); } else { - TrStmt(iter.Body, 
ind + IndentAmount); + wr.Write(TrStmt(iter.Body, ind + IndentAmount).ToString()); } - Indent(ind + IndentAmount); wr.WriteLine("yield break;"); - Indent(ind); wr.WriteLine("}"); + Indent(ind + IndentAmount, wr); wr.WriteLine("yield break;"); + Indent(ind, wr); wr.WriteLine("}"); // end of the class - Indent(indent); wr.WriteLine("}"); + Indent(indent, wr); wr.WriteLine("}"); } else if (d is TraitDecl) { //writing the trait var trait = (TraitDecl)d; - Indent(indent); + Indent(indent, wr); wr.Write("public interface @{0}", trait.CompileName); wr.WriteLine(" {"); - CompileClassMembers(trait, false, indent + IndentAmount); - Indent(indent); wr.WriteLine("}"); + CompileClassMembers(trait, false, indent + IndentAmount, wr); + Indent(indent, wr); wr.WriteLine("}"); //writing the _Companion class List members = new List(); @@ -236,27 +245,27 @@ } } } - Indent(indent); + Indent(indent, wr); wr.Write("public class @_Companion_{0}", trait.CompileName); wr.WriteLine(" {"); - CompileClassMembers(trait, true, indent + IndentAmount); - Indent(indent); wr.WriteLine("}"); + CompileClassMembers(trait, true, indent + IndentAmount, wr); + Indent(indent, wr); wr.WriteLine("}"); } else if (d is ClassDecl) { var cl = (ClassDecl)d; - Indent(indent); - wr.Write("public class @{0}", cl.CompileName); + Indent(indent, wr); + wr.Write("public partial class @{0}", cl.CompileName); if (cl.TypeArgs.Count != 0) { wr.Write("<{0}>", TypeParameters(cl.TypeArgs)); } string sep = " : "; foreach (var trait in cl.TraitsTyp) { - wr.Write("{0}{1}", sep, TypeName(trait)); + wr.Write("{0}{1}", sep, TypeName(trait, wr)); sep = ", "; } wr.WriteLine(" {"); - CompileClassMembers(cl, false, indent+IndentAmount); - Indent(indent); wr.WriteLine("}"); + CompileClassMembers(cl, false, indent+IndentAmount, wr); + Indent(indent, wr); wr.WriteLine("}"); } else if (d is ModuleDecl) { // nop } else { Contract.Assert(false); } @@ -267,15 +276,15 @@ } } - void CompileBuiltIns(BuiltIns builtIns) { + void 
CompileBuiltIns(BuiltIns builtIns, TextWriter wr) { wr.WriteLine("namespace Dafny {"); - Indent(IndentAmount); + Indent(IndentAmount, wr); wr.WriteLine("public partial class Helpers {"); foreach (var decl in builtIns.SystemModule.TopLevelDecls) { if (decl is ArrayClassDecl) { int dims = ((ArrayClassDecl)decl).Dims; // public static T[,] InitNewArray2(BigInteger size0, BigInteger size1) { - Indent(3 * IndentAmount); + Indent(3 * IndentAmount, wr); wr.Write("public static T["); RepeatWrite(wr, dims, "", ","); wr.Write("] InitNewArray{0}(", dims); @@ -283,52 +292,52 @@ wr.WriteLine(") {"); // int s0 = (int)size0; for (int i = 0; i < dims; i++) { - Indent(4 * IndentAmount); + Indent(4 * IndentAmount, wr); wr.WriteLine("int s{0} = (int)size{0};", i); } // T[,] a = new T[s0, s1]; - Indent(4 * IndentAmount); + Indent(4 * IndentAmount, wr); wr.Write("T["); RepeatWrite(wr, dims, "", ","); wr.Write("] a = new T["); RepeatWrite(wr, dims, "s{0}", ","); wr.WriteLine("];"); // BigInteger[,] b = a as BigInteger[,]; - Indent(4 * IndentAmount); + Indent(4 * IndentAmount, wr); wr.Write("BigInteger["); RepeatWrite(wr, dims, "", ","); wr.Write("] b = a as BigInteger["); RepeatWrite(wr, dims, "", ","); wr.WriteLine("];"); // if (b != null) { - Indent(4 * IndentAmount); + Indent(4 * IndentAmount, wr); wr.WriteLine("if (b != null) {"); // BigInteger z = new BigInteger(0); - Indent(5 * IndentAmount); + Indent(5 * IndentAmount, wr); wr.WriteLine("BigInteger z = new BigInteger(0);"); // for (int i0 = 0; i0 < s0; i0++) // for (int i1 = 0; i1 < s1; i1++) for (int i = 0; i < dims; i++) { - Indent((5+i) * IndentAmount); + Indent((5+i) * IndentAmount, wr); wr.WriteLine("for (int i{0} = 0; i{0} < s{0}; i{0}++)", i); } // b[i0,i1] = z; - Indent((5+dims) * IndentAmount); + Indent((5+dims) * IndentAmount, wr); wr.Write("b["); RepeatWrite(wr, dims, "i{0}", ","); wr.WriteLine("] = z;"); // } - Indent(4 * IndentAmount); + Indent(4 * IndentAmount, wr); wr.WriteLine("}"); // return a; - Indent(4 * 
IndentAmount); + Indent(4 * IndentAmount, wr); wr.WriteLine("return a;"); // } - Indent(3 * IndentAmount); + Indent(3 * IndentAmount, wr); wr.WriteLine("}"); // end of method } } - Indent(IndentAmount); + Indent(IndentAmount, wr); wr.WriteLine("}"); // end of class Helpers wr.WriteLine("}"); // end of namespace } @@ -343,7 +352,7 @@ } } - void CompileDatatypeConstructors(DatatypeDecl dt, int indent) + void CompileDatatypeConstructors(DatatypeDecl dt, int indent, TextWriter wr) { Contract.Requires(dt != null); @@ -356,20 +365,20 @@ // public Dt__Lazy(Computer c) { this.c = c; } // public Base_Dt Get() { return c(); } // } - Indent(indent); + Indent(indent, wr); wr.WriteLine("public class {0}__Lazy{1} : Base_{0}{1} {{", dt.CompileName, typeParams); int ind = indent + IndentAmount; - Indent(ind); + Indent(ind, wr); wr.WriteLine("public delegate Base_{0}{1} Computer();", dt.CompileName, typeParams); - Indent(ind); + Indent(ind, wr); wr.WriteLine("public delegate Computer ComputerComputer();"); - Indent(ind); + Indent(ind, wr); wr.WriteLine("Computer c;"); - Indent(ind); + Indent(ind, wr); wr.WriteLine("public {0}__Lazy(Computer c) {{ this.c = c; }}", dt.CompileName); - Indent(ind); + Indent(ind, wr); wr.WriteLine("public Base_{0}{1} Get() {{ return c(); }}", dt.CompileName, typeParams); - Indent(indent); + Indent(indent, wr); wr.WriteLine("}"); } @@ -391,7 +400,7 @@ // // ... 
// } // } - Indent(indent); + Indent(indent, wr); wr.Write("public class {0}", DtCtorDeclarationName(ctor, dt.TypeArgs)); wr.WriteLine(" : Base_{0}{1} {{", dt.CompileName, typeParams); int ind = indent + IndentAmount; @@ -399,32 +408,32 @@ int i = 0; foreach (Formal arg in ctor.Formals) { if (!arg.IsGhost) { - Indent(ind); - wr.WriteLine("public readonly {0} @{1};", TypeName(arg.Type), FormalName(arg, i)); + Indent(ind, wr); + wr.WriteLine("public readonly {0} @{1};", TypeName(arg.Type, wr), FormalName(arg, i)); i++; } } - Indent(ind); + Indent(ind, wr); wr.Write("public {0}(", DtCtorDeclartionName(ctor)); - WriteFormals("", ctor.Formals); + WriteFormals("", ctor.Formals, wr); wr.WriteLine(") {"); i = 0; foreach (Formal arg in ctor.Formals) { if (!arg.IsGhost) { - Indent(ind + IndentAmount); + Indent(ind + IndentAmount, wr); wr.WriteLine("this.@{0} = @{0};", FormalName(arg, i)); i++; } } - Indent(ind); wr.WriteLine("}"); + Indent(ind, wr); wr.WriteLine("}"); // Equals method - Indent(ind); wr.WriteLine("public override bool Equals(object other) {"); - Indent(ind + IndentAmount); + Indent(ind, wr); wr.WriteLine("public override bool Equals(object other) {"); + Indent(ind + IndentAmount, wr); wr.Write("var oth = other as {0}", DtCtorName(ctor, dt.TypeArgs)); wr.WriteLine(";"); - Indent(ind + IndentAmount); + Indent(ind + IndentAmount, wr); wr.Write("return oth != null"); i = 0; foreach (Formal arg in ctor.Formals) { @@ -439,56 +448,76 @@ } } wr.WriteLine(";"); - Indent(ind); wr.WriteLine("}"); - - // GetHashCode method - Indent(ind); wr.WriteLine("public override int GetHashCode() {"); - Indent(ind + IndentAmount); wr.Write("return " + constructorIndex); + Indent(ind, wr); wr.WriteLine("}"); + // GetHashCode method (Uses the djb2 algorithm) + Indent(ind, wr); wr.WriteLine("public override int GetHashCode() {"); + Indent(ind + IndentAmount, wr); wr.WriteLine("ulong hash = 5381;"); + Indent(ind + IndentAmount, wr); wr.WriteLine("hash = ((hash << 5) + hash) + {0};", 
constructorIndex); i = 0; foreach (Formal arg in ctor.Formals) { if (!arg.IsGhost) { string nm = FormalName(arg, i); - wr.Write(" ^ this.@{0}.GetHashCode()", nm); + Indent(ind + IndentAmount, wr); wr.WriteLine("hash = ((hash << 5) + hash) + ((ulong)this.@{0}.GetHashCode());", nm); i++; } } - wr.WriteLine(";"); - Indent(ind); wr.WriteLine("}"); + Indent(ind + IndentAmount, wr); wr.WriteLine("return (int) hash;"); + Indent(ind, wr); wr.WriteLine("}"); if (dt is IndDatatypeDecl) { - Indent(ind); wr.WriteLine("public override string ToString() {"); + Indent(ind, wr); wr.WriteLine("public override string ToString() {"); string nm; if (dt is TupleTypeDecl) { nm = ""; } else { nm = (dt.Module.IsDefaultModule ? "" : dt.Module.CompileName + ".") + dt.CompileName + "." + ctor.CompileName; } - Indent(ind + IndentAmount); wr.WriteLine("string s = \"{0}\";", nm); + var tempVar = GenVarName("s", ctor.Formals); + Indent(ind + IndentAmount, wr); wr.WriteLine("string {0} = \"{1}\";", tempVar, nm); if (ctor.Formals.Count != 0) { - Indent(ind + IndentAmount); wr.WriteLine("s += \"(\";"); + Indent(ind + IndentAmount, wr); wr.WriteLine("{0} += \"(\";", tempVar); i = 0; foreach (var arg in ctor.Formals) { if (!arg.IsGhost) { if (i != 0) { - Indent(ind + IndentAmount); wr.WriteLine("s += \", \";"); + Indent(ind + IndentAmount, wr); wr.WriteLine("{0} += \", \";", tempVar); } - Indent(ind + IndentAmount); wr.WriteLine("s += @{0}.ToString();", FormalName(arg, i)); + Indent(ind + IndentAmount, wr); wr.WriteLine("{0} += @{1}.ToString();", tempVar,FormalName(arg, i)); i++; } } - Indent(ind + IndentAmount); wr.WriteLine("s += \")\";"); + Indent(ind + IndentAmount, wr); wr.WriteLine("{0} += \")\";", tempVar); } - Indent(ind + IndentAmount); wr.WriteLine("return s;"); - Indent(ind); wr.WriteLine("}"); + Indent(ind + IndentAmount, wr); wr.WriteLine("return {0};", tempVar); + Indent(ind, wr); wr.WriteLine("}"); } - Indent(indent); wr.WriteLine("}"); + Indent(indent, wr); wr.WriteLine("}"); } 
constructorIndex++; } - void CompileDatatypeStruct(DatatypeDecl dt, int indent) { + // create a varName that is not a duplicate of formals' name + string GenVarName(string root, List formals) { + bool finished = false; + while (!finished) { + finished = true; + int i = 0; + foreach (var arg in formals) { + if (!arg.IsGhost) { + if (root.Equals(FormalName(arg, i))) { + root += root; + finished = false; + } + i++; + } + } + } + return root; + } + + void CompileDatatypeStruct(DatatypeDecl dt, int indent, TextWriter wr) { Contract.Requires(dt != null); // public struct Dt : IDatatype{ @@ -532,46 +561,46 @@ DtT += DtT_TypeArgs; } - Indent(indent); + Indent(indent, wr); wr.WriteLine("public struct @{0} {{", DtT); int ind = indent + IndentAmount; - Indent(ind); + Indent(ind, wr); wr.WriteLine("Base_{0} _d;", DtT); - Indent(ind); + Indent(ind, wr); wr.WriteLine("public Base_{0} _D {{", DtT); - Indent(ind + IndentAmount); + Indent(ind + IndentAmount, wr); wr.WriteLine("get {"); - Indent(ind + 2 * IndentAmount); + Indent(ind + 2 * IndentAmount, wr); wr.WriteLine("if (_d == null) {"); - Indent(ind + 3 * IndentAmount); + Indent(ind + 3 * IndentAmount, wr); wr.WriteLine("_d = Default;"); if (dt is CoDatatypeDecl) { string typeParams = dt.TypeArgs.Count == 0 ? 
"" : string.Format("<{0}>", TypeParameters(dt.TypeArgs)); - Indent(ind + 2 * IndentAmount); + Indent(ind + 2 * IndentAmount, wr); wr.WriteLine("}} else if (_d is {0}__Lazy{1}) {{", dt.CompileName, typeParams); - Indent(ind + 3 * IndentAmount); + Indent(ind + 3 * IndentAmount, wr); wr.WriteLine("_d = (({0}__Lazy{1})_d).Get();", dt.CompileName, typeParams); } - Indent(ind + 2 * IndentAmount); wr.WriteLine("}"); - Indent(ind + 2 * IndentAmount); wr.WriteLine("return _d;"); - Indent(ind + IndentAmount); wr.WriteLine("}"); - Indent(ind); wr.WriteLine("}"); + Indent(ind + 2 * IndentAmount, wr); wr.WriteLine("}"); + Indent(ind + 2 * IndentAmount, wr); wr.WriteLine("return _d;"); + Indent(ind + IndentAmount, wr); wr.WriteLine("}"); + Indent(ind, wr); wr.WriteLine("}"); - Indent(ind); + Indent(ind, wr); wr.WriteLine("public @{0}(Base_{1} d) {{ this._d = d; }}", dt.CompileName, DtT); - Indent(ind); + Indent(ind, wr); wr.WriteLine("static Base_{0} theDefault;", DtT); - Indent(ind); + Indent(ind, wr); wr.WriteLine("public static Base_{0} Default {{", DtT); - Indent(ind + IndentAmount); + Indent(ind + IndentAmount, wr); wr.WriteLine("get {"); - Indent(ind + 2 * IndentAmount); + Indent(ind + 2 * IndentAmount, wr); wr.WriteLine("if (theDefault == null) {"); - Indent(ind + 3 * IndentAmount); + Indent(ind + 3 * IndentAmount, wr); wr.Write("theDefault = "); DatatypeCtor defaultCtor; @@ -585,55 +614,55 @@ string sep = ""; foreach (Formal f in defaultCtor.Formals) { if (!f.IsGhost) { - wr.Write("{0}{1}", sep, DefaultValue(f.Type)); + wr.Write("{0}{1}", sep, DefaultValue(f.Type, wr)); sep = ", "; } } wr.Write(")"); wr.WriteLine(";"); - Indent(ind + 2 * IndentAmount); + Indent(ind + 2 * IndentAmount, wr); wr.WriteLine("}"); - Indent(ind + 2 * IndentAmount); + Indent(ind + 2 * IndentAmount, wr); wr.WriteLine("return theDefault;"); - Indent(ind + IndentAmount); wr.WriteLine("}"); + Indent(ind + IndentAmount, wr); wr.WriteLine("}"); - Indent(ind); wr.WriteLine("}"); + Indent(ind, wr); 
wr.WriteLine("}"); - Indent(ind); wr.WriteLine("public override bool Equals(object other) {"); - Indent(ind + IndentAmount); + Indent(ind, wr); wr.WriteLine("public override bool Equals(object other) {"); + Indent(ind + IndentAmount, wr); wr.WriteLine("return other is @{0} && _D.Equals(((@{0})other)._D);", DtT); - Indent(ind); wr.WriteLine("}"); + Indent(ind, wr); wr.WriteLine("}"); - Indent(ind); + Indent(ind, wr); wr.WriteLine("public override int GetHashCode() { return _D.GetHashCode(); }"); if (dt is IndDatatypeDecl) { - Indent(ind); + Indent(ind, wr); wr.WriteLine("public override string ToString() { return _D.ToString(); }"); } // query properties foreach (var ctor in dt.Ctors) { // public bool is_Ctor0 { get { return _D is Dt_Ctor0; } } - Indent(ind); + Indent(ind, wr); wr.WriteLine("public bool is_{0} {{ get {{ return _D is {1}_{0}{2}; }} }}", ctor.CompileName, dt.CompileName, DtT_TypeArgs); } if (dt.HasFinitePossibleValues) { - Indent(ind); + Indent(ind, wr); wr.WriteLine("public static System.Collections.Generic.IEnumerable<@{0}> AllSingletonConstructors {{", DtT); - Indent(ind + IndentAmount); + Indent(ind + IndentAmount, wr); wr.WriteLine("get {"); foreach (var ctr in dt.Ctors) { if (ctr.Formals.Count == 0) { - Indent(ind + IndentAmount + IndentAmount); + Indent(ind + IndentAmount + IndentAmount, wr); wr.WriteLine("yield return new @{0}(new {2}_{1}());", DtT, ctr.CompileName, dt.CompileName); } } - Indent(ind + IndentAmount + IndentAmount); + Indent(ind + IndentAmount + IndentAmount, wr); wr.WriteLine("yield break;"); - Indent(ind + IndentAmount); + Indent(ind + IndentAmount, wr); wr.WriteLine("}"); - Indent(ind); + Indent(ind, wr); wr.WriteLine("}"); } @@ -642,17 +671,17 @@ foreach (var arg in ctor.Formals) { if (!arg.IsGhost && arg.HasName) { // public T0 @Dtor0 { get { return ((DT_Ctor)_D).@Dtor0; } } - Indent(ind); - wr.WriteLine("public {0} dtor_{1} {{ get {{ return (({2}_{3}{4})_D).@{1}; }} }}", TypeName(arg.Type), arg.CompileName, dt.CompileName, 
ctor.CompileName, DtT_TypeArgs); + Indent(ind, wr); + wr.WriteLine("public {0} dtor_{1} {{ get {{ return (({2}_{3}{4})_D).@{1}; }} }}", TypeName(arg.Type, wr), arg.CompileName, dt.CompileName, ctor.CompileName, DtT_TypeArgs); } } } - Indent(indent); + Indent(indent, wr); wr.WriteLine("}"); } - int WriteFormals(string sep, List/*!*/ formals) + int WriteFormals(string sep, List/*!*/ formals, TextWriter wr) { Contract.Requires(sep != null); Contract.Requires(cce.NonNullElements(formals)); @@ -660,7 +689,7 @@ foreach (Formal arg in formals) { if (!arg.IsGhost) { string name = FormalName(arg, i); - wr.Write("{0}{1}{2} @{3}", sep, arg.InParam ? "" : "out ", TypeName(arg.Type), name); + wr.Write("{0}{1}{2} @{3}", sep, arg.InParam ? "" : "out ", TypeName(arg.Type, wr), name); sep = ", "; i++; } @@ -676,7 +705,11 @@ } string DtName(DatatypeDecl decl) { - return decl.Module.IsDefaultModule ? decl.CompileName : decl.FullCompileName; + var d = (TopLevelDecl)decl; + while (DafnyOptions.O.IronDafny && d.ClonedFrom != null) { + d = (TopLevelDecl)d.ClonedFrom; + } + return d.Module.IsDefaultModule ? 
d.CompileName : d.FullCompileName; } string DtCtorName(DatatypeCtor ctor) { Contract.Requires(ctor != null); @@ -712,51 +745,80 @@ return s; } - string DtCtorName(DatatypeCtor ctor, List typeArgs) { + string DtCtorName(DatatypeCtor ctor, List typeArgs, TextWriter wr) { Contract.Requires(ctor != null); Contract.Ensures(Contract.Result() != null); var s = DtCtorName(ctor); if (typeArgs != null && typeArgs.Count != 0) { - s += "<" + TypeNames(typeArgs) + ">"; + s += "<" + TypeNames(typeArgs, wr) + ">"; } return s; } public bool HasMain(Program program) { + Method mainMethod = null; + bool hasMain = false; foreach (var module in program.Modules) { + if (module.IsAbstract) { + // the purpose of an abstract module is to skip compilation + continue; + } foreach (var decl in module.TopLevelDecls) { var c = decl as ClassDecl; if (c != null) { foreach (var member in c.Members) { var m = member as Method; if (m != null && IsMain(m)) { - return true; + if (mainMethod == null) { + mainMethod = m; + hasMain = true; + } else { + // more than one main in the program + ErrorWriter.WriteLine("More than one method is declared as \"main\""); + ErrorCount++; + hasMain = false; + } } } } } } - return false; + return hasMain; } public static bool IsMain(Method m) { // In order to be a legal Main() method, the following must be true: - // The method takes no parameters + // The method takes no parameters // The method is not a ghost method // The method has no requires clause // The method has no modifies clause // If the method is an instance (that is, non-static) method in a class, then the enclosing class must not declare any constructor - if (!m.IsGhost && m.Name == "Main" && m.TypeArgs.Count == 0 && m.Ins.Count == 0 && m.Outs.Count == 0 && m.Req.Count == 0 + // Or if a method is annotated with {:main} and the above restrictions apply, except it is allowed to take ghost arguments, + // and it is allowed to have preconditions and modifies. 
This lets the programmer add some explicit assumptions about the outside world, + // modeled, for example, via ghost parameters. + if (!m.IsGhost && m.Name == "Main" && m.TypeArgs.Count == 0 && m.Ins.Count == 0 && m.Outs.Count == 0 && m.Req.Count == 0 && m.Mod.Expressions.Count == 0 && (m.IsStatic || (((ClassDecl)m.EnclosingClass) == null) || !((ClassDecl)m.EnclosingClass).HasConstructor)) { return true; - } - else { + } else if (Attributes.Contains(m.Attributes, "main") && !m.IsGhost && m.TypeArgs.Count == 0 && m.Outs.Count == 0 + && (m.IsStatic || (((ClassDecl)m.EnclosingClass) == null) || !((ClassDecl)m.EnclosingClass).HasConstructor)) { + if (m.Ins.Count == 0) { + return true; + } else { + bool isGhost = true; + foreach (var arg in m.Ins) { + if (!arg.IsGhost) { + isGhost = false; + } + } + return isGhost; + } + } else { return false; } } - void CompileClassMembers(ClassDecl c, bool forCompanionClass, int indent) { + void CompileClassMembers(ClassDecl c, bool forCompanionClass, int indent, TextWriter wr) { Contract.Requires(c != null); Contract.Requires(!forCompanionClass || c is TraitDecl); Contract.Requires(0 <= indent); @@ -765,9 +827,9 @@ if (member is Field) { var f = (Field)member; // every field is inherited - Indent(indent); - wr.WriteLine("public {0} @_{1};", TypeName(f.Type), f.CompileName); - wr.Write("public {0} @{1}", TypeName(f.Type), f.CompileName); + Indent(indent, wr); + wr.WriteLine("public {0} @_{1};", TypeName(f.Type, wr), f.CompileName); + wr.Write("public {0} @{1}", TypeName(f.Type, wr), f.CompileName); wr.WriteLine(" {"); wr.WriteLine(" get { "); wr.Write("return this.@_{0};", f.CompileName); @@ -779,11 +841,11 @@ } else if (member is Function) { var f = (Function)member; Contract.Assert(f.Body != null); - CompileFunction(indent, f); + CompileFunction(indent, f, wr); } else if (member is Method) { var method = (Method)member; Contract.Assert(method.Body != null); - CompileMethod(c, indent, method); + CompileMethod(c, indent, method, wr); 
} else { Contract.Assert(false); // unexpected member } @@ -794,12 +856,12 @@ if (f.IsGhost || forCompanionClass) { // emit nothing } else if (c is TraitDecl) { - Indent(indent); - wr.Write("{0} @{1}", TypeName(f.Type), f.CompileName); + Indent(indent, wr); + wr.Write("{0} @{1}", TypeName(f.Type, wr), f.CompileName); wr.WriteLine(" { get; set; }"); } else { - Indent(indent); - wr.WriteLine("public {0} @{1} = {2};", TypeName(f.Type), f.CompileName, DefaultValue(f.Type)); + Indent(indent, wr); + wr.WriteLine("public {0} @{1} = {2};", TypeName(f.Type, wr), f.CompileName, DefaultValue(f.Type, wr)); } } else if (member is Function) { var f = (Function)member; @@ -808,29 +870,29 @@ if (forCompanionClass || Attributes.Contains(f.Attributes, "axiom")) { // suppress error message (in the case of "forCompanionClass", the non-forCompanionClass call will produce the error message) } else { - Error("Function {0} has no body", f.FullName); + Error("Function {0} has no body", wr, f.FullName); } } else if (f.IsGhost) { // nothing to compile, but we do check for assumes if (f.Body == null) { Contract.Assert(c is TraitDecl && !f.IsStatic); } else { - var v = new CheckHasNoAssumes_Visitor(this); + var v = new CheckHasNoAssumes_Visitor(this, wr); v.Visit(f.Body); } } else if (c is TraitDecl && !forCompanionClass) { // include it, unless it's static if (!f.IsStatic) { - Indent(indent); - wr.Write("{0} @{1}", TypeName(f.ResultType), f.CompileName); + Indent(indent, wr); + wr.Write("{0} @{1}", TypeName(f.ResultType, wr), f.CompileName); wr.Write("("); - WriteFormals("", f.Formals); + WriteFormals("", f.Formals, wr); wr.WriteLine(");"); } } else if (forCompanionClass && !f.IsStatic) { // companion classes only has static members } else { - CompileFunction(indent, f); + CompileFunction(indent, f, wr); } } else if (member is Method) { var m = (Method)member; @@ -839,30 +901,30 @@ if (forCompanionClass || Attributes.Contains(m.Attributes, "axiom")) { // suppress error message (in the case of 
"forCompanionClass", the non-forCompanionClass call will produce the error message) } else { - Error("Method {0} has no body", m.FullName); + Error("Method {0} has no body", wr, m.FullName); } } else if (m.IsGhost) { // nothing to compile, but we do check for assumes if (m.Body == null) { Contract.Assert(c is TraitDecl && !m.IsStatic); } else { - var v = new CheckHasNoAssumes_Visitor(this); + var v = new CheckHasNoAssumes_Visitor(this, wr); v.Visit(m.Body); } } else if (c is TraitDecl && !forCompanionClass) { // include it, unless it's static if (!m.IsStatic) { - Indent(indent); + Indent(indent, wr); wr.Write("void @{0}", m.CompileName); wr.Write("("); - int nIns = WriteFormals("", m.Ins); - WriteFormals(nIns == 0 ? "" : ", ", m.Outs); + int nIns = WriteFormals("", m.Ins, wr); + WriteFormals(nIns == 0 ? "" : ", ", m.Outs, wr); wr.WriteLine(");"); } } else if (forCompanionClass && !m.IsStatic) { // companion classes only has static members } else { - CompileMethod(c, indent, m); + CompileMethod(c, indent, m, wr); } } else { Contract.Assert(false); throw new cce.UnreachableException(); // unexpected member @@ -870,85 +932,218 @@ } } - private void CompileFunction(int indent, Function f) { - Indent(indent); - wr.Write("public {0}{1} @{2}", f.IsStatic ? "static " : "", TypeName(f.ResultType), f.CompileName); + private void CompileFunction(int indent, Function f, TextWriter wr) { + Indent(indent, wr); + wr.Write("public {0}{1} @{2}", f.IsStatic ? 
"static " : "", TypeName(f.ResultType, wr), f.CompileName); if (f.TypeArgs.Count != 0) { wr.Write("<{0}>", TypeParameters(f.TypeArgs)); } wr.Write("("); - WriteFormals("", f.Formals); + WriteFormals("", f.Formals, wr); wr.WriteLine(") {"); - CompileReturnBody(f.Body, indent + IndentAmount); - Indent(indent); wr.WriteLine("}"); + CompileReturnBody(f.Body, indent + IndentAmount, wr); + Indent(indent, wr); wr.WriteLine("}"); } - private void CompileMethod(ClassDecl c, int indent, Method m) { - Indent(indent); + private void CompileMethod(ClassDecl c, int indent, Method m, TextWriter wr) { + Indent(indent, wr); wr.Write("public {0}void @{1}", m.IsStatic ? "static " : "", m.CompileName); if (m.TypeArgs.Count != 0) { wr.Write("<{0}>", TypeParameters(m.TypeArgs)); } wr.Write("("); - int nIns = WriteFormals("", m.Ins); - WriteFormals(nIns == 0 ? "" : ", ", m.Outs); + int nIns = WriteFormals("", m.Ins, wr); + WriteFormals(nIns == 0 ? "" : ", ", m.Outs, wr); wr.WriteLine(")"); - Indent(indent); wr.WriteLine("{"); + Indent(indent, wr); wr.WriteLine("{"); foreach (Formal p in m.Outs) { if (!p.IsGhost) { - Indent(indent + IndentAmount); - wr.WriteLine("@{0} = {1};", p.CompileName, DefaultValue(p.Type)); + Indent(indent + IndentAmount, wr); + wr.WriteLine("@{0} = {1};", p.CompileName, DefaultValue(p.Type, wr)); } } if (m.Body == null) { - Error("Method {0} has no body", m.FullName); + Error("Method {0} has no body", wr, m.FullName); } else { if (m.IsTailRecursive) { - Indent(indent); wr.WriteLine("TAIL_CALL_START: ;"); if (!m.IsStatic) { - Indent(indent + IndentAmount); wr.WriteLine("var _this = this;"); + Indent(indent + IndentAmount, wr); wr.WriteLine("var _this = this;"); } + Indent(indent, wr); wr.WriteLine("TAIL_CALL_START: ;"); } Contract.Assert(enclosingMethod == null); enclosingMethod = m; - TrStmtList(m.Body.Body, indent); + TrStmtList(m.Body.Body, indent, wr); Contract.Assert(enclosingMethod == m); enclosingMethod = null; } - Indent(indent); wr.WriteLine("}"); + 
Indent(indent, wr); wr.WriteLine("}"); // allow the Main method to be an instance method - if (!m.IsStatic && IsMain(m)) { - Indent(indent); + if (IsMain(m) && (!m.IsStatic || m.CompileName != "Main")) { + Indent(indent, wr); wr.WriteLine("public static void Main(string[] args) {"); - Contract.Assert(m.EnclosingClass == c); - Indent(indent + IndentAmount); - wr.Write("@{0} b = new @{0}", c.CompileName); - if (c.TypeArgs.Count != 0) { - // instantiate every parameter, it doesn't particularly matter how - wr.Write("<"); - string sep = ""; - for (int i = 0; i < c.TypeArgs.Count; i++) { - wr.Write("{0}int", sep); - sep = ", "; + if (!m.IsStatic) { + Contract.Assert(m.EnclosingClass == c); + Indent(indent + IndentAmount, wr); + wr.Write("@{0} b = new @{0}", c.CompileName); + if (c.TypeArgs.Count != 0) { + // instantiate every parameter, it doesn't particularly matter how + wr.Write("<"); + string sep = ""; + for (int i = 0; i < c.TypeArgs.Count; i++) { + wr.Write("{0}int", sep); + sep = ", "; + } + wr.Write(">"); } - wr.Write(">"); + wr.WriteLine("();"); + Indent(indent + IndentAmount, wr); wr.WriteLine("b.@{0}();", m.CompileName); + } else { + Indent(indent + IndentAmount, wr); wr.WriteLine("@{0}();", m.CompileName); } - wr.WriteLine("();"); - Indent(indent + IndentAmount); wr.WriteLine("b.@Main();"); - Indent(indent); wr.WriteLine("}"); + Indent(indent, wr); wr.WriteLine("}"); } } - void CompileReturnBody(Expression body, int indent) { - Contract.Requires(0 <= indent); - body = body.Resolved; - Indent(indent); + void TrCasePatternOpt(CasePattern pat, Expression rhs, string rhs_string, int indent, TextWriter wr, bool inLetExprBody) { + Contract.Requires(pat != null); + Contract.Requires(pat.Var != null || rhs != null); + if (pat.Var != null) { + // The trivial Dafny "pattern" expression + // var x := G + // is translated into C# as: + // var x := G; + var bv = pat.Var; + if (!bv.IsGhost) { + Indent(indent, wr); + wr.Write("{0} {1} = ", TypeName(bv.Type, wr), "@" + 
bv.CompileName); + if (rhs != null) { + TrExpr(rhs, wr, inLetExprBody); + } else { + wr.Write(rhs_string); + } + wr.Write(";\n"); + } + } else if (pat.Arguments != null) { + // The Dafny "pattern" expression + // var Pattern(x,y) := G + // is translated into C# as: + // var tmp := G; + // var x := dtorX(tmp); + // var y := dtorY(tmp); + var ctor = pat.Ctor; + Contract.Assert(ctor != null); // follows from successful resolution + Contract.Assert(pat.Arguments.Count == ctor.Formals.Count); // follows from successful resolution + + // Create the temporary variable to hold G + var tmp_name = idGenerator.FreshId("_let_tmp_rhs"); + Indent(indent, wr); + wr.Write("{0} {1} = ", TypeName(rhs.Type, wr), tmp_name); + TrExpr(rhs, wr, inLetExprBody); + wr.WriteLine(";"); + + var k = 0; // number of non-ghost formals processed + for (int i = 0; i < pat.Arguments.Count; i++) { + var arg = pat.Arguments[i]; + var formal = ctor.Formals[i]; + if (formal.IsGhost) { + // nothing to compile, but do a sanity check + Contract.Assert(!Contract.Exists(arg.Vars, bv => !bv.IsGhost)); + } else { + TrCasePatternOpt(arg, null, string.Format("(({0})({1})._D).@{2}", DtCtorName(ctor, ((DatatypeValue)pat.Expr).InferredTypeArgs, wr), tmp_name, FormalName(formal, k)), indent, wr, inLetExprBody); + k++; + } + } + } + } + + void ReturnExpr(Expression expr, int indent, TextWriter wr, bool inLetExprBody) { + Indent(indent, wr); wr.Write("return "); - TrExpr(body); + TrExpr(expr, wr, inLetExprBody); wr.WriteLine(";"); } + void TrExprOpt(Expression expr, int indent, TextWriter wr, bool inLetExprBody) { + Contract.Requires(expr != null); + if (expr is LetExpr) { + var e = (LetExpr)expr; + if (e.Exact) { + for (int i = 0; i < e.LHSs.Count; i++) { + var lhs = e.LHSs[i]; + if (Contract.Exists(lhs.Vars, bv => !bv.IsGhost)) { + TrCasePatternOpt(lhs, e.RHSs[i], null, indent, wr, inLetExprBody); + } + } + TrExprOpt(e.Body, indent, wr, inLetExprBody); + } else { + // We haven't optimized the other cases, so 
fallback to normal compilation + ReturnExpr(e, indent, wr, inLetExprBody); + } + } else if (expr is ITEExpr) { + ITEExpr e = (ITEExpr)expr; + Indent(indent, wr); + wr.Write("if ("); + TrExpr(e.Test, wr, inLetExprBody); + wr.Write(") {\n"); + TrExprOpt(e.Thn, indent + IndentAmount, wr, inLetExprBody); + Indent(indent, wr); + wr.WriteLine("} else {"); + TrExprOpt(e.Els, indent + IndentAmount, wr, inLetExprBody); + Indent(indent, wr); + wr.WriteLine("}"); + } else if (expr is MatchExpr) { + var e = (MatchExpr)expr; + // var _source = E; + // if (source.is_Ctor0) { + // FormalType f0 = ((Dt_Ctor0)source._D).a0; + // ... + // return Body0; + // } else if (...) { + // ... + // } else if (true) { + // ... + // } + string source = idGenerator.FreshId("_source"); + Indent(indent, wr); + wr.Write("{0} {1} = ", TypeName(e.Source.Type, wr), source); + TrExpr(e.Source, wr, inLetExprBody); + wr.WriteLine(";"); + + if (e.Cases.Count == 0) { + // the verifier would have proved we never get here; still, we need some code that will compile + wr.Write("throw new System.Exception();"); + } else { + int i = 0; + var sourceType = (UserDefinedType)e.Source.Type.NormalizeExpand(); + foreach (MatchCaseExpr mc in e.Cases) { + //Indent(indent); + MatchCasePrelude(source, sourceType, cce.NonNull(mc.Ctor), mc.Arguments, i, e.Cases.Count, indent, wr); + TrExprOpt(mc.Body, indent + IndentAmount, wr, inLetExprBody); + i++; + } + Indent(indent, wr); + wr.WriteLine("}"); + } + } else if (expr is StmtExpr) { + var e = (StmtExpr)expr; + TrExprOpt(e.E, indent, wr, inLetExprBody); + } else { + // We haven't optimized any other cases, so fallback to normal compilation + ReturnExpr(expr, indent, wr, inLetExprBody); + } + } + + void CompileReturnBody(Expression body, int indent, TextWriter wr) { + Contract.Requires(0 <= indent); + body = body.Resolved; + //Indent(indent); + //wr.Write("return "); + TrExprOpt(body, indent, wr, false); + //wr.WriteLine(";"); + } + // ----- Type 
--------------------------------------------------------------------------------- readonly string DafnySetClass = "Dafny.Set"; @@ -964,168 +1159,207 @@ return null; } - string TypeName_Companion(Type type) { + string TypeName_Companion(Type type, TextWriter wr) { Contract.Requires(type != null); var udt = type as UserDefinedType; if (udt != null && udt.ResolvedClass is TraitDecl) { string s = udt.FullCompanionCompileName; if (udt.TypeArgs.Count != 0) { if (udt.TypeArgs.Exists(argType => argType is ObjectType)) { - Error("compilation does not support type 'object' as a type parameter; consider introducing a ghost"); + Error("compilation does not support type 'object' as a type parameter; consider introducing a ghost", wr); } - s += "<" + TypeNames(udt.TypeArgs) + ">"; + s += "<" + TypeNames(udt.TypeArgs, wr) + ">"; } return s; } else { - return TypeName(type); + return TypeName(type, wr); } } - string TypeName(Type type) + string TypeName(Type type, TextWriter wr) { Contract.Requires(type != null); Contract.Ensures(Contract.Result() != null); - type = type.NormalizeExpand(); - if (type is TypeProxy) { + var xType = type.NormalizeExpand(); + if (xType is TypeProxy) { // unresolved proxy; just treat as ref, since no particular type information is apparently needed for this type return "object"; } - if (type is BoolType) { + if (xType is BoolType) { return "bool"; - } else if (type is CharType) { + } else if (xType is CharType) { return "char"; - } else if (type is IntType) { + } else if (xType is IntType) { return "BigInteger"; - } else if (type is RealType) { + } else if (xType is RealType) { return "Dafny.BigRational"; - } else if (type.AsNewtype != null) { - NativeType nativeType = type.AsNewtype.NativeType; + } else if (xType.AsNewtype != null) { + NativeType nativeType = xType.AsNewtype.NativeType; if (nativeType != null) { return nativeType.Name; } - return TypeName(type.AsNewtype.BaseType); - } else if (type is ObjectType) { + return 
TypeName(xType.AsNewtype.BaseType, wr); + } else if (xType is ObjectType) { return "object"; - } else if (type.IsArrayType) { - ArrayClassDecl at = type.AsArrayType; + } else if (xType.IsArrayType) { + ArrayClassDecl at = xType.AsArrayType; Contract.Assert(at != null); // follows from type.IsArrayType - Type elType = UserDefinedType.ArrayElementType(type); - string name = TypeName(elType) + "["; + Type elType = UserDefinedType.ArrayElementType(xType); + string name = TypeName(elType, wr) + "["; for (int i = 1; i < at.Dims; i++) { name += ","; } return name + "]"; - } else if (type is UserDefinedType) { - var udt = (UserDefinedType)type; - return TypeName_UDT(udt.FullCompileName, udt.TypeArgs); - } else if (type is SetType) { - Type argType = ((SetType)type).Arg; + } else if (xType is UserDefinedType) { + var udt = (UserDefinedType)xType; + var s = udt.FullCompileName; + var rc = udt.ResolvedClass; + if (DafnyOptions.O.IronDafny && + !(xType is ArrowType) && + rc != null && + rc.Module != null && + !rc.Module.IsDefaultModule) { + while (rc.ClonedFrom != null || rc.ExclusiveRefinement != null) { + if (rc.ClonedFrom != null) { + rc = (TopLevelDecl)rc.ClonedFrom; + } else { + Contract.Assert(rc.ExclusiveRefinement != null); + rc = rc.ExclusiveRefinement; + } + } + s = rc.FullCompileName; + } + return TypeName_UDT(s, udt.TypeArgs, wr); + } else if (xType is SetType) { + Type argType = ((SetType)xType).Arg; if (argType is ObjectType) { - Error("compilation of set is not supported; consider introducing a ghost"); + Error("compilation of set is not supported; consider introducing a ghost", wr); } - return DafnySetClass + "<" + TypeName(argType) + ">"; - } else if (type is SeqType) { - Type argType = ((SeqType)type).Arg; + return DafnySetClass + "<" + TypeName(argType, wr) + ">"; + } else if (xType is SeqType) { + Type argType = ((SeqType)xType).Arg; if (argType is ObjectType) { - Error("compilation of seq is not supported; consider introducing a ghost"); + 
Error("compilation of seq is not supported; consider introducing a ghost", wr); } - return DafnySeqClass + "<" + TypeName(argType) + ">"; - } else if (type is MultiSetType) { - Type argType = ((MultiSetType)type).Arg; + return DafnySeqClass + "<" + TypeName(argType, wr) + ">"; + } else if (xType is MultiSetType) { + Type argType = ((MultiSetType)xType).Arg; if (argType is ObjectType) { - Error("compilation of seq is not supported; consider introducing a ghost"); + Error("compilation of seq is not supported; consider introducing a ghost", wr); } - return DafnyMultiSetClass + "<" + TypeName(argType) + ">"; - } else if (type is MapType) { - Type domType = ((MapType)type).Domain; - Type ranType = ((MapType)type).Range; + return DafnyMultiSetClass + "<" + TypeName(argType, wr) + ">"; + } else if (xType is MapType) { + Type domType = ((MapType)xType).Domain; + Type ranType = ((MapType)xType).Range; if (domType is ObjectType || ranType is ObjectType) { - Error("compilation of map or map<_, object> is not supported; consider introducing a ghost"); + Error("compilation of map or map<_, object> is not supported; consider introducing a ghost", wr); } - return DafnyMapClass + "<" + TypeName(domType) + "," + TypeName(ranType) + ">"; + return DafnyMapClass + "<" + TypeName(domType, wr) + "," + TypeName(ranType, wr) + ">"; } else { Contract.Assert(false); throw new cce.UnreachableException(); // unexpected type } } - string TypeName_UDT(string fullCompileName, List typeArgs) { + string TypeName_UDT(string fullCompileName, List typeArgs, TextWriter wr) { Contract.Requires(fullCompileName != null); Contract.Requires(typeArgs != null); string s = "@" + fullCompileName; if (typeArgs.Count != 0) { if (typeArgs.Exists(argType => argType is ObjectType)) { - Error("compilation does not support type 'object' as a type parameter; consider introducing a ghost"); + Error("compilation does not support type 'object' as a type parameter; consider introducing a ghost", wr); } - s += "<" + 
TypeNames(typeArgs) + ">"; + s += "<" + TypeNames(typeArgs, wr) + ">"; } return s; } - string/*!*/ TypeNames(List/*!*/ types) { + string/*!*/ TypeNames(List/*!*/ types, TextWriter wr) { Contract.Requires(cce.NonNullElements(types)); Contract.Ensures(Contract.Result() != null); - return Util.Comma(types, TypeName); + string res = ""; + string c = ""; + foreach (var t in types) { + res += c + TypeName(t, wr); + c = ","; + } + return res; } string/*!*/ TypeParameters(List/*!*/ targs) { Contract.Requires(cce.NonNullElements(targs)); Contract.Ensures(Contract.Result() != null); - string s = ""; - string sep = ""; - foreach (TypeParameter tp in targs) { - s += sep + "@" + tp.CompileName; - sep = ","; - } - return s; + return Util.Comma(targs, tp => "@" + tp.CompileName); } - string DefaultValue(Type type) + string DefaultValue(Type type, TextWriter wr) { Contract.Requires(type != null); Contract.Ensures(Contract.Result() != null); - type = type.NormalizeExpand(); - if (type is TypeProxy) { + var xType = type.NormalizeExpand(); + if (xType is TypeProxy) { // unresolved proxy; just treat as ref, since no particular type information is apparently needed for this type return "null"; } - if (type is BoolType) { + if (xType is BoolType) { return "false"; - } else if (type is CharType) { + } else if (xType is CharType) { return "'D'"; - } else if (type is IntType) { + } else if (xType is IntType) { return "BigInteger.Zero"; - } else if (type is RealType) { + } else if (xType is RealType) { return "Dafny.BigRational.ZERO"; - } else if (type.AsNewtype != null) { - if (type.AsNewtype.NativeType != null) { + } else if (xType.AsNewtype != null) { + if (xType.AsNewtype.NativeType != null) { return "0"; } - return DefaultValue(type.AsNewtype.BaseType); - } else if (type.IsRefType) { - return string.Format("({0})null", TypeName(type)); - } else if (type.IsDatatype) { - UserDefinedType udt = (UserDefinedType)type; - string s = "@" + udt.FullCompileName; + return 
DefaultValue(xType.AsNewtype.BaseType, wr); + } else if (xType.IsRefType) { + return string.Format("({0})null", TypeName(xType, wr)); + } else if (xType.IsDatatype) { + var udt = (UserDefinedType)xType; + var s = "@" + udt.FullCompileName; + var rc = udt.ResolvedClass; + if (DafnyOptions.O.IronDafny && + !(xType is ArrowType) && + rc != null && + rc.Module != null && + !rc.Module.IsDefaultModule) { + while (rc.ClonedFrom != null || rc.ExclusiveRefinement != null) { + if (rc.ClonedFrom != null) { + rc = (TopLevelDecl)rc.ClonedFrom; + } else { + Contract.Assert(rc.ExclusiveRefinement != null); + rc = rc.ExclusiveRefinement; + } + } + s = "@" + rc.FullCompileName; + } if (udt.TypeArgs.Count != 0) { - s += "<" + TypeNames(udt.TypeArgs) + ">"; + s += "<" + TypeNames(udt.TypeArgs, wr) + ">"; } return string.Format("new {0}()", s); - } else if (type.IsTypeParameter) { - var udt = (UserDefinedType)type; - return "default(@" + udt.FullCompileName + ")"; - } else if (type is SetType) { - return DafnySetClass + "<" + TypeName(((SetType)type).Arg) + ">.Empty"; - } else if (type is MultiSetType) { - return DafnyMultiSetClass + "<" + TypeName(((MultiSetType)type).Arg) + ">.Empty"; - } else if (type is SeqType) { - return DafnySeqClass + "<" + TypeName(((SeqType)type).Arg) + ">.Empty"; - } else if (type is MapType) { - return TypeName(type)+".Empty"; - } else if (type is ArrowType) { + } else if (xType.IsTypeParameter) { + var udt = (UserDefinedType)xType; + string s = "default(@" + udt.FullCompileName; + if (udt.TypeArgs.Count != 0) + { + s += "<" + TypeNames(udt.TypeArgs, wr) + ">"; + } + s += ")"; + return s; + } else if (xType is SetType) { + return DafnySetClass + "<" + TypeName(((SetType)xType).Arg, wr) + ">.Empty"; + } else if (xType is MultiSetType) { + return DafnyMultiSetClass + "<" + TypeName(((MultiSetType)xType).Arg, wr) + ">.Empty"; + } else if (xType is SeqType) { + return DafnySeqClass + "<" + TypeName(((SeqType)xType).Arg, wr) + ">.Empty"; + } else if (xType is 
MapType) { + return TypeName(xType, wr) + ".Empty"; + } else if (xType is ArrowType) { return "null"; } else { Contract.Assert(false); throw new cce.UnreachableException(); // unexpected type @@ -1137,59 +1371,61 @@ public class CheckHasNoAssumes_Visitor : BottomUpVisitor { readonly Compiler compiler; - public CheckHasNoAssumes_Visitor(Compiler c) { + TextWriter wr; + public CheckHasNoAssumes_Visitor(Compiler c, TextWriter wr) { Contract.Requires(c != null); compiler = c; + this.wr = wr; } protected override void VisitOneStmt(Statement stmt) { if (stmt is AssumeStmt) { - compiler.Error("an assume statement cannot be compiled (line {0})", stmt.Tok.line); + compiler.Error("an assume statement cannot be compiled (line {0})", wr, stmt.Tok.line); } else if (stmt is AssignSuchThatStmt) { var s = (AssignSuchThatStmt)stmt; if (s.AssumeToken != null) { - compiler.Error("an assume statement cannot be compiled (line {0})", s.AssumeToken.line); + compiler.Error("an assume statement cannot be compiled (line {0})", wr, s.AssumeToken.line); } } else if (stmt is ForallStmt) { var s = (ForallStmt)stmt; if (s.Body == null) { - compiler.Error("a forall statement without a body cannot be compiled (line {0})", stmt.Tok.line); + compiler.Error("a forall statement without a body cannot be compiled (line {0})", wr, stmt.Tok.line); } } else if (stmt is WhileStmt) { var s = (WhileStmt)stmt; if (s.Body == null) { - compiler.Error("a while statement without a body cannot be compiled (line {0})", stmt.Tok.line); + compiler.Error("a while statement without a body cannot be compiled (line {0})", wr, stmt.Tok.line); } } } } - void TrStmt(Statement stmt, int indent) + TextWriter TrStmt(Statement stmt, int indent) { Contract.Requires(stmt != null); + TextWriter wr = new StringWriter(); if (stmt.IsGhost) { - var v = new CheckHasNoAssumes_Visitor(this); + var v = new CheckHasNoAssumes_Visitor(this, wr); v.Visit(stmt); - Indent(indent); wr.WriteLine("{ }"); - return; + Indent(indent, wr); 
wr.WriteLine("{ }"); + return wr; } - if (stmt is PrintStmt) { PrintStmt s = (PrintStmt)stmt; foreach (var arg in s.Args) { - Indent(indent); + Indent(indent, wr); wr.Write("System.Console.Write("); - TrExpr(arg); + TrExpr(arg, wr, false); wr.WriteLine(");"); } } else if (stmt is BreakStmt) { var s = (BreakStmt)stmt; - Indent(indent); + Indent(indent, wr); wr.WriteLine("goto after_{0};", s.TargetStmt.Labels.Data.AssignUniqueId("after_", idGenerator)); } else if (stmt is ProduceStmt) { var s = (ProduceStmt)stmt; if (s.hiddenUpdate != null) - TrStmt(s.hiddenUpdate, indent); - Indent(indent); + wr.Write(TrStmt(s.hiddenUpdate, indent).ToString()); + Indent(indent, wr); if (s is YieldStmt) { wr.WriteLine("yield return null;"); } else { @@ -1199,7 +1435,7 @@ var s = (UpdateStmt)stmt; var resolved = s.ResolvedStatements; if (resolved.Count == 1) { - TrStmt(resolved[0], indent); + wr.Write(TrStmt(resolved[0], indent).ToString()); } else { // multi-assignment Contract.Assert(s.Lhss.Count == resolved.Count); @@ -1211,17 +1447,17 @@ var lhs = s.Lhss[i]; var rhs = s.Rhss[i]; if (!(rhs is HavocRhs)) { - lvalues.Add(CreateLvalue(lhs, indent)); + lvalues.Add(CreateLvalue(lhs, indent, wr)); string target = idGenerator.FreshId("_rhs"); rhss.Add(target); - TrRhs("var " + target, null, rhs, indent); + TrRhs("var " + target, null, rhs, indent, wr); } } } Contract.Assert(lvalues.Count == rhss.Count); for (int i = 0; i < lvalues.Count; i++) { - Indent(indent); + Indent(indent, wr); wr.WriteLine("{0} = {1};", lvalues[i], rhss[i]); } } @@ -1229,32 +1465,32 @@ } else if (stmt is AssignStmt) { AssignStmt s = (AssignStmt)stmt; Contract.Assert(!(s.Lhs is SeqSelectExpr) || ((SeqSelectExpr)s.Lhs).SelectOne); // multi-element array assignments are not allowed - TrRhs(null, s.Lhs, s.Rhs, indent); + TrRhs(null, s.Lhs, s.Rhs, indent, wr); } else if (stmt is AssignSuchThatStmt) { var s = (AssignSuchThatStmt)stmt; if (s.AssumeToken != null) { // Note, a non-ghost AssignSuchThatStmt may contain an 
assume - Error("an assume statement cannot be compiled (line {0})", s.AssumeToken.line); + Error("an assume statement cannot be compiled (line {0})", wr, s.AssumeToken.line); } else if (s.MissingBounds != null) { foreach (var bv in s.MissingBounds) { - Error("this assign-such-that statement is too advanced for the current compiler; Dafny's heuristics cannot find any bound for variable '{0}' (line {1})", bv.Name, s.Tok.line); + Error("this assign-such-that statement is too advanced for the current compiler; Dafny's heuristics cannot find any bound for variable '{0}' (line {1})", wr, bv.Name, s.Tok.line); } } else { Contract.Assert(s.Bounds != null); // follows from s.MissingBounds == null TrAssignSuchThat(indent, s.Lhss.ConvertAll(lhs => ((IdentifierExpr)lhs.Resolved).Var), // the resolver allows only IdentifierExpr left-hand sides - s.Expr, s.Bounds, s.Tok.line); + s.Expr, s.Bounds, s.Tok.line, wr, false); } } else if (stmt is CallStmt) { CallStmt s = (CallStmt)stmt; - TrCallStmt(s, null, indent); + wr.Write(TrCallStmt(s, null, indent).ToString()); } else if (stmt is BlockStmt) { - Indent(indent); wr.WriteLine("{"); - TrStmtList(((BlockStmt)stmt).Body, indent); - Indent(indent); wr.WriteLine("}"); + Indent(indent, wr); wr.WriteLine("{"); + TrStmtList(((BlockStmt)stmt).Body, indent, wr); + Indent(indent, wr); wr.WriteLine("}"); } else if (stmt is IfStmt) { IfStmt s = (IfStmt)stmt; @@ -1263,37 +1499,45 @@ if (s.Els == null) { // let's compile the "else" branch, since that involves no work // (still, let's leave a marker in the source code to indicate that this is what we did) - Indent(indent); + Indent(indent, wr); wr.WriteLine("if (!false) { }"); } else { // let's compile the "then" branch - Indent(indent); + Indent(indent, wr); wr.WriteLine("if (true)"); - TrStmt(s.Thn, indent); + wr.Write(TrStmt(s.Thn, indent).ToString()); } } else { - Indent(indent); wr.Write("if ("); - TrExpr(s.Guard); + Indent(indent, wr); wr.Write("if ("); + TrExpr(s.IsExistentialGuard ? 
Translator.AlphaRename((ExistsExpr)s.Guard, "eg_d", new Translator(null)) : s.Guard, wr, false); wr.WriteLine(")"); - TrStmt(s.Thn, indent); + // We'd like to do "TrStmt(s.Thn, indent)", except we want the scope of any existential variables to come inside the block + Indent(indent, wr); wr.WriteLine("{"); + if (s.IsExistentialGuard) { + IntroduceAndAssignBoundVars(indent + IndentAmount, (ExistsExpr)s.Guard, wr); + } + TrStmtList(s.Thn.Body, indent, wr); + Indent(indent, wr); wr.WriteLine("}"); + if (s.Els != null) { - Indent(indent); wr.WriteLine("else"); - TrStmt(s.Els, indent); + Indent(indent, wr); wr.WriteLine("else"); + wr.Write(TrStmt(s.Els, indent).ToString()); } } } else if (stmt is AlternativeStmt) { var s = (AlternativeStmt)stmt; - foreach (var alternative in s.Alternatives) { - } - Indent(indent); + Indent(indent, wr); foreach (var alternative in s.Alternatives) { wr.Write("if ("); - TrExpr(alternative.Guard); + TrExpr(alternative.IsExistentialGuard ? Translator.AlphaRename((ExistsExpr)alternative.Guard, "eg_d", new Translator(null)) : alternative.Guard, wr, false); wr.WriteLine(") {"); - TrStmtList(alternative.Body, indent); - Indent(indent); + if (alternative.IsExistentialGuard) { + IntroduceAndAssignBoundVars(indent + IndentAmount, (ExistsExpr)alternative.Guard, wr); + } + TrStmtList(alternative.Body, indent, wr); + Indent(indent, wr); wr.Write("} else "); } wr.WriteLine("{ /*unreachable alternative*/ }"); @@ -1301,38 +1545,38 @@ } else if (stmt is WhileStmt) { WhileStmt s = (WhileStmt)stmt; if (s.Body == null) { - return; + return wr; } if (s.Guard == null) { - Indent(indent); + Indent(indent, wr); wr.WriteLine("while (false) { }"); } else { - Indent(indent); + Indent(indent, wr); wr.Write("while ("); - TrExpr(s.Guard); + TrExpr(s.Guard, wr, false); wr.WriteLine(")"); - TrStmt(s.Body, indent); + wr.Write(TrStmt(s.Body, indent).ToString()); } } else if (stmt is AlternativeLoopStmt) { var s = (AlternativeLoopStmt)stmt; if (s.Alternatives.Count != 0) { 
- Indent(indent); + Indent(indent, wr); wr.WriteLine("while (true) {"); int ind = indent + IndentAmount; foreach (var alternative in s.Alternatives) { } - Indent(ind); + Indent(ind, wr); foreach (var alternative in s.Alternatives) { wr.Write("if ("); - TrExpr(alternative.Guard); + TrExpr(alternative.Guard, wr, false); wr.WriteLine(") {"); - TrStmtList(alternative.Body, ind); - Indent(ind); + TrStmtList(alternative.Body, ind, wr); + Indent(ind, wr); wr.Write("} else "); } wr.WriteLine("{ break; }"); - Indent(indent); + Indent(indent, wr); wr.WriteLine("}"); } @@ -1340,17 +1584,17 @@ var s = (ForallStmt)stmt; if (s.Kind != ForallStmt.ParBodyKind.Assign) { // Call and Proof have no side effects, so they can simply be optimized away. - return; + return wr; } else if (s.BoundVars.Count == 0) { // the bound variables just spell out a single point, so the forall statement is equivalent to one execution of the body - TrStmt(s.Body, indent); - return; + wr.Write(TrStmt(s.Body, indent).ToString()); + return wr; } var s0 = (AssignStmt)s.S0; if (s0.Rhs is HavocRhs) { // The forall statement says to havoc a bunch of things. This can be efficiently compiled // into doing nothing. 
- return; + return wr; } var rhs = ((ExprRhs)s0.Rhs).Expr; @@ -1394,29 +1638,29 @@ if (s0.Lhs is MemberSelectExpr) { var lhs = (MemberSelectExpr)s0.Lhs; L = 2; - tupleTypeArgs = TypeName(lhs.Obj.Type); + tupleTypeArgs = TypeName(lhs.Obj.Type, wr); } else if (s0.Lhs is SeqSelectExpr) { var lhs = (SeqSelectExpr)s0.Lhs; L = 3; // note, we might as well do the BigInteger-to-int cast for array indices here, before putting things into the Tuple rather than when they are extracted from the Tuple - tupleTypeArgs = TypeName(lhs.Seq.Type) + ",int"; + tupleTypeArgs = TypeName(lhs.Seq.Type, wr) + ",int"; } else { var lhs = (MultiSelectExpr)s0.Lhs; L = 2 + lhs.Indices.Count; if (8 < L) { - Error("compiler currently does not support assignments to more-than-6-dimensional arrays in forall statements"); - return; + Error("compiler currently does not support assignments to more-than-6-dimensional arrays in forall statements", wr); + return wr; } - tupleTypeArgs = TypeName(lhs.Array.Type); + tupleTypeArgs = TypeName(lhs.Array.Type, wr); for (int i = 0; i < lhs.Indices.Count; i++) { // note, we might as well do the BigInteger-to-int cast for array indices here, before putting things into the Tuple rather than when they are extracted from the Tuple tupleTypeArgs += ",int"; } } - tupleTypeArgs += "," + TypeName(rhs.Type); + tupleTypeArgs += "," + TypeName(rhs.Type, wr); // declare and construct "ingredients" - Indent(indent); + Indent(indent, wr); wr.WriteLine("var {0} = new System.Collections.Generic.List>();", ingredients, tupleTypeArgs); var n = s.BoundVars.Count; @@ -1426,31 +1670,38 @@ var bound = s.Bounds[i]; var bv = s.BoundVars[i]; if (bound is ComprehensionExpr.BoolBoundedPool) { - Indent(ind); + Indent(ind, wr); wr.Write("foreach (var @{0} in Dafny.Helpers.AllBooleans) {{ ", bv.CompileName); + } else if (bound is ComprehensionExpr.CharBoundedPool) { + Indent(ind, wr); + wr.Write("foreach (var @{0} in Dafny.Helpers.AllChars) {{ ", bv.CompileName); } else if (bound is 
ComprehensionExpr.IntBoundedPool) { var b = (ComprehensionExpr.IntBoundedPool)bound; - Indent(ind); - wr.Write("for (var @{0} = ", bv.CompileName); - TrExpr(b.LowerBound); - wr.Write("; @{0} < ", bv.CompileName); - TrExpr(b.UpperBound); - wr.Write("; @{0}++) {{ ", bv.CompileName); + Indent(ind, wr); + if (AsNativeType(bv.Type) != null) { + wr.Write("foreach (var @{0} in @{1}.IntegerRange(", bv.CompileName, bv.Type.AsNewtype.FullCompileName); + } else { + wr.Write("foreach (var @{0} in Dafny.Helpers.IntegerRange(", bv.CompileName); + } + TrExpr(b.LowerBound, wr, false); + wr.Write(", "); + TrExpr(b.UpperBound, wr, false); + wr.Write(")) { "); } else if (bound is ComprehensionExpr.SetBoundedPool) { var b = (ComprehensionExpr.SetBoundedPool)bound; - Indent(ind); + Indent(ind, wr); wr.Write("foreach (var @{0} in (", bv.CompileName); - TrExpr(b.Set); + TrExpr(b.Set, wr, false); wr.Write(").Elements) { "); } else if (bound is ComprehensionExpr.SeqBoundedPool) { var b = (ComprehensionExpr.SeqBoundedPool)bound; - Indent(ind); + Indent(ind, wr); wr.Write("foreach (var @{0} in (", bv.CompileName); - TrExpr(b.Seq); + TrExpr(b.Seq, wr, false); wr.Write(").UniqueElements) { "); } else if (bound is ComprehensionExpr.DatatypeBoundedPool) { var b = (ComprehensionExpr.DatatypeBoundedPool)bound; - wr.Write("foreach (var @{0} in {1}.AllSingletonConstructors) {{", bv.CompileName, TypeName(bv.Type)); + wr.Write("foreach (var @{0} in {1}.AllSingletonConstructors) {{", bv.CompileName, TypeName(bv.Type, wr)); } else { Contract.Assert(false); throw new cce.UnreachableException(); // unexpected BoundedPool type } @@ -1460,55 +1711,55 @@ // if (range) { // ingredients.Add(new L-Tuple( LHS0(w,x,y,z), LHS1(w,x,y,z), ..., RHS(w,x,y,z) )); // } - Indent(indent + n * IndentAmount); + Indent(indent + n * IndentAmount, wr); wr.Write("if ("); foreach (var bv in s.BoundVars) { if (bv.Type.NormalizeExpand() is NatType) { wr.Write("0 <= {0} && ", bv.CompileName); } } - TrExpr(s.Range); + 
TrExpr(s.Range, wr, false); wr.WriteLine(") {"); var indFinal = indent + (n + 1) * IndentAmount; - Indent(indFinal); + Indent(indFinal, wr); wr.Write("{0}.Add(new System.Tuple<{1}>(", ingredients, tupleTypeArgs); if (s0.Lhs is MemberSelectExpr) { var lhs = (MemberSelectExpr)s0.Lhs; - TrExpr(lhs.Obj); + TrExpr(lhs.Obj, wr, false); } else if (s0.Lhs is SeqSelectExpr) { var lhs = (SeqSelectExpr)s0.Lhs; - TrExpr(lhs.Seq); + TrExpr(lhs.Seq, wr, false); wr.Write(", (int)("); - TrExpr(lhs.E0); + TrExpr(lhs.E0, wr, false); wr.Write(")"); } else { var lhs = (MultiSelectExpr)s0.Lhs; - TrExpr(lhs.Array); + TrExpr(lhs.Array, wr, false); for (int i = 0; i < lhs.Indices.Count; i++) { wr.Write(", (int)("); - TrExpr(lhs.Indices[i]); + TrExpr(lhs.Indices[i], wr, false); wr.Write(")"); } } wr.Write(", "); - TrExpr(rhs); + TrExpr(rhs, wr, false); wr.WriteLine("));"); - Indent(indent + n * IndentAmount); + Indent(indent + n * IndentAmount, wr); wr.WriteLine("}"); for (int i = n; 0 <= --i; ) { - Indent(indent + i * IndentAmount); + Indent(indent + i * IndentAmount, wr); wr.WriteLine("}"); } // foreach (L-Tuple l in ingredients) { // LHS[ l0, l1, l2, ..., l(L-2) ] = l(L-1); // } - Indent(indent); + Indent(indent, wr); wr.WriteLine("foreach (var {0} in {1}) {{", tup, ingredients); - Indent(indent + IndentAmount); + Indent(indent + IndentAmount, wr); if (s0.Lhs is MemberSelectExpr) { var lhs = (MemberSelectExpr)s0.Lhs; wr.WriteLine("{0}.Item1.@{1} = {0}.Item2;", tup, lhs.MemberName); @@ -1525,7 +1776,7 @@ } wr.WriteLine("] = {0}.Item{1};", tup, L); } - Indent(indent); + Indent(indent, wr); wr.WriteLine("}"); } else if (stmt is MatchStmt) { @@ -1542,42 +1793,64 @@ // } if (s.Cases.Count != 0) { string source = idGenerator.FreshId("_source"); - Indent(indent); - wr.Write("{0} {1} = ", TypeName(cce.NonNull(s.Source.Type)), source); - TrExpr(s.Source); + Indent(indent, wr); + wr.Write("{0} {1} = ", TypeName(cce.NonNull(s.Source.Type), wr), source); + TrExpr(s.Source, wr, false); 
wr.WriteLine(";"); int i = 0; var sourceType = (UserDefinedType)s.Source.Type.NormalizeExpand(); foreach (MatchCaseStmt mc in s.Cases) { - MatchCasePrelude(source, sourceType, cce.NonNull(mc.Ctor), mc.Arguments, i, s.Cases.Count, indent); - TrStmtList(mc.Body, indent); + MatchCasePrelude(source, sourceType, cce.NonNull(mc.Ctor), mc.Arguments, i, s.Cases.Count, indent, wr); + TrStmtList(mc.Body, indent, wr); i++; } - Indent(indent); wr.WriteLine("}"); + Indent(indent, wr); wr.WriteLine("}"); } } else if (stmt is VarDeclStmt) { var s = (VarDeclStmt)stmt; foreach (var local in s.Locals) { - TrLocalVar(local, true, indent); + TrLocalVar(local, true, indent, wr); } if (s.Update != null) { - TrStmt(s.Update, indent); + wr.Write(TrStmt(s.Update, indent).ToString()); } + } else if (stmt is LetStmt) { + var s = (LetStmt)stmt; + for (int i = 0; i < s.LHSs.Count; i++) { + var lhs = s.LHSs[i]; + if (Contract.Exists(lhs.Vars, bv => !bv.IsGhost)) { + TrCasePatternOpt(lhs, s.RHSs[i], null, indent, wr, false); + } + } } else if (stmt is ModifyStmt) { var s = (ModifyStmt)stmt; if (s.Body != null) { - TrStmt(s.Body, indent); + wr.Write(TrStmt(s.Body, indent).ToString()); } } else { Contract.Assert(false); throw new cce.UnreachableException(); // unexpected statement } + + return wr; } - private void TrAssignSuchThat(int indent, List lhss, Expression constraint, List bounds, int debuginfoLine) { + private void IntroduceAndAssignBoundVars(int indent, ExistsExpr exists, TextWriter wr) { + Contract.Requires(0 <= indent); + Contract.Requires(exists != null); + Contract.Assume(exists.Bounds != null); // follows from successful resolution + Contract.Assert(exists.Range == null); // follows from invariant of class IfStmt + foreach (var bv in exists.BoundVars) { + TrLocalVar(bv, false, indent, wr); + } + var ivars = exists.BoundVars.ConvertAll(bv => (IVariable)bv); + TrAssignSuchThat(indent, ivars, exists.Term, exists.Bounds, exists.tok.line, wr, false); + } + + private void 
TrAssignSuchThat(int indent, List lhss, Expression constraint, List bounds, int debuginfoLine, TextWriter wr, bool inLetExprBody) { Contract.Requires(0 <= indent); Contract.Requires(lhss != null); Contract.Requires(constraint != null); @@ -1619,7 +1892,7 @@ int ind = indent; bool needIterLimit = lhss.Count != 1 && bounds.Exists(bnd => !bnd.IsFinite); if (needIterLimit) { - Indent(indent); + Indent(indent, wr); wr.WriteLine("for (var {0} = new BigInteger(5); ; {0} *= 2) {{", iterLimit); ind += IndentAmount; } @@ -1628,89 +1901,87 @@ var bound = bounds[i]; var bv = lhss[i]; if (needIterLimit) { - Indent(ind); + Indent(ind, wr); wr.WriteLine("var {0}_{1} = {0};", iterLimit, i); } var tmpVar = idGenerator.FreshId("_assign_such_that_"); - Indent(ind); + Indent(ind, wr); if (bound is ComprehensionExpr.BoolBoundedPool) { wr.WriteLine("foreach (var {0} in Dafny.Helpers.AllBooleans) {{ @{1} = {0};", tmpVar, bv.CompileName); + } else if (bound is ComprehensionExpr.CharBoundedPool) { + wr.WriteLine("foreach (var {0} in Dafny.Helpers.AllChars) {{ @{1} = {0};", tmpVar, bv.CompileName); } else if (bound is ComprehensionExpr.IntBoundedPool) { var b = (ComprehensionExpr.IntBoundedPool)bound; - // (tmpVar is not used in this case) - if (b.LowerBound != null) { - wr.Write("@{0} = ", bv.CompileName); - TrExpr(b.LowerBound); - wr.WriteLine(";"); - Indent(ind); - if (b.UpperBound != null) { - wr.Write("for (; @{0} < ", bv.CompileName); - TrExpr(b.UpperBound); - wr.WriteLine("; @{0}++) {{ ", bv.CompileName); - } else { - wr.WriteLine("for (;; @{0}++) {{ ", bv.CompileName); - } + if (AsNativeType(bv.Type) != null) { + wr.Write("foreach (var @{0} in @{1}.IntegerRange(", tmpVar, bv.Type.AsNewtype.FullCompileName); } else { - Contract.Assert(b.UpperBound != null); - wr.Write("@{0} = ", bv.CompileName); - TrExpr(b.UpperBound); - wr.WriteLine(";"); - Indent(ind); - wr.WriteLine("for (;; @{0}--) {{ ", bv.CompileName); + wr.Write("foreach (var @{0} in Dafny.Helpers.IntegerRange(", tmpVar); + } 
+ if (b.LowerBound == null) { + wr.Write("null"); + } else { + TrExpr(b.LowerBound, wr, inLetExprBody); + } + wr.Write(", "); + if (b.UpperBound == null) { + wr.Write("null"); + } else { + TrExpr(b.UpperBound, wr, inLetExprBody); } + wr.WriteLine(")) {{ @{1} = {0};", tmpVar, bv.CompileName); } else if (bound is AssignSuchThatStmt.WiggleWaggleBound) { wr.WriteLine("foreach (var {0} in Dafny.Helpers.AllIntegers) {{ @{1} = {0};", tmpVar, bv.CompileName); } else if (bound is ComprehensionExpr.SetBoundedPool) { var b = (ComprehensionExpr.SetBoundedPool)bound; wr.Write("foreach (var {0} in (", tmpVar); - TrExpr(b.Set); + TrExpr(b.Set, wr, inLetExprBody); wr.WriteLine(").Elements) {{ @{0} = {1};", bv.CompileName, tmpVar); } else if (bound is ComprehensionExpr.SubSetBoundedPool) { var b = (ComprehensionExpr.SubSetBoundedPool)bound; wr.Write("foreach (var {0} in (", tmpVar); - TrExpr(b.UpperBound); + TrExpr(b.UpperBound, wr, inLetExprBody); wr.WriteLine(").AllSubsets) {{ @{0} = {1};", bv.CompileName, tmpVar); } else if (bound is ComprehensionExpr.MapBoundedPool) { var b = (ComprehensionExpr.MapBoundedPool)bound; wr.Write("foreach (var {0} in (", tmpVar); - TrExpr(b.Map); + TrExpr(b.Map, wr, inLetExprBody); wr.WriteLine(").Domain) {{ @{0} = {1};", bv.CompileName, tmpVar); } else if (bound is ComprehensionExpr.SeqBoundedPool) { var b = (ComprehensionExpr.SeqBoundedPool)bound; wr.Write("foreach (var {0} in (", tmpVar); - TrExpr(b.Seq); + TrExpr(b.Seq, wr, inLetExprBody); wr.WriteLine(").Elements) {{ @{0} = {1};", bv.CompileName, tmpVar); } else if (bound is ComprehensionExpr.DatatypeBoundedPool) { var b = (ComprehensionExpr.DatatypeBoundedPool)bound; - wr.WriteLine("foreach (var {0} in {1}.AllSingletonConstructors) {{ @{2} = {0};", tmpVar, TypeName(bv.Type), bv.CompileName); + wr.WriteLine("foreach (var {0} in {1}.AllSingletonConstructors) {{ @{2} = {0};", tmpVar, TypeName(bv.Type, wr), bv.CompileName); } else { Contract.Assert(false); throw new cce.UnreachableException(); // 
unexpected BoundedPool type } if (needIterLimit) { - Indent(ind + IndentAmount); + Indent(ind + IndentAmount, wr); wr.WriteLine("if ({0}_{1} == 0) {{ break; }} {0}_{1}--;", iterLimit, i); } } - Indent(ind); + Indent(ind, wr); wr.Write("if ("); - TrExpr(constraint); + TrExpr(constraint, wr, inLetExprBody); wr.WriteLine(") {"); - Indent(ind + IndentAmount); + Indent(ind + IndentAmount, wr); wr.WriteLine("goto {0};", doneLabel); - Indent(ind); + Indent(ind, wr); wr.WriteLine("}"); - Indent(indent); + Indent(indent, wr); for (int i = 0; i < n; i++) { wr.Write(i == 0 ? "}" : " }"); } wr.WriteLine(needIterLimit ? " }" : ""); - Indent(indent); + Indent(indent, wr); wr.WriteLine("throw new System.Exception(\"assign-such-that search produced no value (line {0})\");", debuginfoLine); - Indent(indent); + Indent(indent, wr); wr.WriteLine("{0}: ;", doneLabel); } - string CreateLvalue(Expression lhs, int indent) { + string CreateLvalue(Expression lhs, int indent, TextWriter wr) { lhs = lhs.Resolved; if (lhs is IdentifierExpr) { var ll = (IdentifierExpr)lhs; @@ -1718,9 +1989,9 @@ } else if (lhs is MemberSelectExpr) { var ll = (MemberSelectExpr)lhs; string obj = idGenerator.FreshId("_obj"); - Indent(indent); + Indent(indent, wr); wr.Write("var {0} = ", obj); - TrExpr(ll.Obj); + TrExpr(ll.Obj, wr, false); wr.WriteLine(";"); return string.Format("{0}.@{1}", obj, ll.Member.CompileName); } else if (lhs is SeqSelectExpr) { @@ -1728,31 +1999,31 @@ var c = idGenerator.FreshNumericId("_arr+_index"); string arr = "_arr" + c; string index = "_index" + c; - Indent(indent); + Indent(indent, wr); wr.Write("var {0} = ", arr); - TrExpr(ll.Seq); + TrExpr(ll.Seq, wr, false); wr.WriteLine(";"); - Indent(indent); + Indent(indent, wr); wr.Write("var {0} = ", index); - TrExpr(ll.E0); + TrExpr(ll.E0, wr, false); wr.WriteLine(";"); return string.Format("{0}[(int){1}]", arr, index); } else { var ll = (MultiSelectExpr)lhs; var c = idGenerator.FreshNumericId("_arr+_index"); string arr = "_arr" + c; - 
Indent(indent); + Indent(indent, wr); wr.Write("var {0} = ", arr); - TrExpr(ll.Array); + TrExpr(ll.Array, wr, false); wr.WriteLine(";"); string fullString = arr + "["; string sep = ""; int i = 0; foreach (var idx in ll.Indices) { string index = "_index" + i + "_" + c; - Indent(indent); + Indent(indent, wr); wr.Write("var {0} = ", index); - TrExpr(idx); + TrExpr(idx, wr, false); wr.WriteLine(";"); fullString += sep + "(int)" + index; sep = ", "; @@ -1762,21 +2033,21 @@ } } - void TrRhs(string target, Expression targetExpr, AssignmentRhs rhs, int indent) { + void TrRhs(string target, Expression targetExpr, AssignmentRhs rhs, int indent, TextWriter wr) { Contract.Requires((target == null) != (targetExpr == null)); var tRhs = rhs as TypeRhs; if (tRhs != null && tRhs.InitCall != null) { string nw = idGenerator.FreshId("_nw"); - Indent(indent); + Indent(indent, wr); wr.Write("var {0} = ", nw); - TrAssignmentRhs(rhs); // in this case, this call will not require us to spill any let variables first + TrAssignmentRhs(rhs, wr); // in this case, this call will not require us to spill any let variables first wr.WriteLine(";"); - TrCallStmt(tRhs.InitCall, nw, indent); - Indent(indent); + wr.Write(TrCallStmt(tRhs.InitCall, nw, indent).ToString()); + Indent(indent, wr); if (target != null) { wr.Write(target); } else { - TrExpr(targetExpr); + TrExpr(targetExpr, wr, false); } wr.WriteLine(" = {0};", nw); } else if (rhs is HavocRhs) { @@ -1787,22 +2058,23 @@ foreach (Expression dim in tRhs.ArrayDimensions) { } } - Indent(indent); + Indent(indent, wr); if (target != null) { wr.Write(target); } else { - TrExpr(targetExpr); + TrExpr(targetExpr, wr, false); } wr.Write(" = "); - TrAssignmentRhs(rhs); + TrAssignmentRhs(rhs, wr); wr.WriteLine(";"); } } - void TrCallStmt(CallStmt s, string receiverReplacement, int indent) { + TextWriter TrCallStmt(CallStmt s, string receiverReplacement, int indent) { Contract.Requires(s != null); Contract.Assert(s.Method != null); // follows from the fact 
that stmt has been successfully resolved + StringWriter wr = new StringWriter(); if (s.Method == enclosingMethod && enclosingMethod.IsTailRecursive) { // compile call as tail-recursive @@ -1819,9 +2091,9 @@ string inTmp = idGenerator.FreshId("_in"); inTmps.Add(inTmp); - Indent(indent); + Indent(indent, wr); wr.Write("var {0} = ", inTmp); - TrExpr(s.Receiver); + TrExpr(s.Receiver, wr, false); wr.WriteLine(";"); } for (int i = 0; i < s.Method.Ins.Count; i++) { @@ -1829,29 +2101,29 @@ if (!p.IsGhost) { string inTmp = idGenerator.FreshId("_in"); inTmps.Add(inTmp); - Indent(indent); + Indent(indent, wr); wr.Write("var {0} = ", inTmp); - TrExpr(s.Args[i]); + TrExpr(s.Args[i], wr, false); wr.WriteLine(";"); } } // Now, assign to the formals int n = 0; if (!s.Method.IsStatic) { - Indent(indent); + Indent(indent, wr); wr.WriteLine("_this = {0};", inTmps[n]); n++; } foreach (var p in s.Method.Ins) { if (!p.IsGhost) { - Indent(indent); + Indent(indent, wr); wr.WriteLine("{0} = {1};", p.CompileName, inTmps[n]); n++; } } Contract.Assert(n == inTmps.Count); // finally, the jump back to the head of the method - Indent(indent); + Indent(indent, wr); wr.WriteLine("goto TAIL_CALL_START;"); } else { @@ -1862,7 +2134,7 @@ for (int i = 0; i < s.Method.Outs.Count; i++) { Formal p = s.Method.Outs[i]; if (!p.IsGhost) { - lvalues.Add(CreateLvalue(s.Lhs[i], indent)); + lvalues.Add(CreateLvalue(s.Lhs[i], indent, wr)); } } var outTmps = new List(); @@ -1871,8 +2143,8 @@ if (!p.IsGhost) { string target = idGenerator.FreshId("_out"); outTmps.Add(target); - Indent(indent); - wr.WriteLine("{0} {1};", TypeName(s.Lhs[i].Type), target); + Indent(indent, wr); + wr.WriteLine("{0} {1};", TypeName(s.Lhs[i].Type, wr), target); } } Contract.Assert(lvalues.Count == outTmps.Count); @@ -1883,14 +2155,14 @@ } } if (receiverReplacement != null) { - Indent(indent); + Indent(indent, wr); wr.Write("@" + receiverReplacement); } else if (s.Method.IsStatic) { - Indent(indent); - 
wr.Write(TypeName_Companion(s.Receiver.Type)); + Indent(indent, wr); + wr.Write(TypeName_Companion(s.Receiver.Type, wr)); } else { - Indent(indent); - TrParenExpr(s.Receiver); + Indent(indent, wr); + TrParenExpr(s.Receiver, wr, false); } wr.Write(".@{0}(", s.Method.CompileName); @@ -1899,7 +2171,7 @@ Formal p = s.Method.Ins[i]; if (!p.IsGhost) { wr.Write(sep); - TrExpr(s.Args[i]); + TrExpr(s.Args[i], wr, false); sep = ", "; } } @@ -1912,44 +2184,45 @@ // assign to the actual LHSs for (int j = 0; j < lvalues.Count; j++) { - Indent(indent); + Indent(indent, wr); wr.WriteLine("{0} = {1};", lvalues[j], outTmps[j]); } } + return wr; } /// /// Before calling TrAssignmentRhs(rhs), the caller must have spilled the let variables declared in "rhs". /// - void TrAssignmentRhs(AssignmentRhs rhs) { + void TrAssignmentRhs(AssignmentRhs rhs, TextWriter wr) { Contract.Requires(rhs != null); Contract.Requires(!(rhs is HavocRhs)); if (rhs is ExprRhs) { ExprRhs e = (ExprRhs)rhs; - TrExpr(e.Expr); + TrExpr(e.Expr, wr, false); } else { TypeRhs tp = (TypeRhs)rhs; if (tp.ArrayDimensions == null) { - wr.Write("new {0}()", TypeName(tp.EType)); + wr.Write("new {0}()", TypeName(tp.EType, wr)); } else { if (tp.EType.IsIntegerType || tp.EType.IsTypeParameter) { // Because the default constructor for BigInteger does not generate a valid BigInteger, we have // to excplicitly initialize the elements of an integer array. This is all done in a helper routine. 
- wr.Write("Dafny.Helpers.InitNewArray{0}<{1}>", tp.ArrayDimensions.Count, TypeName(tp.EType)); + wr.Write("Dafny.Helpers.InitNewArray{0}<{1}>", tp.ArrayDimensions.Count, TypeName(tp.EType, wr)); string prefix = "("; foreach (Expression dim in tp.ArrayDimensions) { wr.Write(prefix); - TrParenExpr(dim); + TrParenExpr(dim, wr, false); prefix = ", "; } wr.Write(")"); } else { - wr.Write("new {0}", TypeName(tp.EType)); + wr.Write("new {0}", TypeName(tp.EType, wr)); string prefix = "["; foreach (Expression dim in tp.ArrayDimensions) { wr.Write("{0}(int)", prefix); - TrParenExpr(dim); + TrParenExpr(dim, wr, false); prefix = ", "; } wr.Write("]"); @@ -1958,34 +2231,43 @@ } } - void TrStmtList(List/*!*/ stmts, int indent) {Contract.Requires(cce.NonNullElements(stmts)); + void TrStmtList(List/*!*/ stmts, int indent, TextWriter writer) {Contract.Requires(cce.NonNullElements(stmts)); foreach (Statement ss in stmts) { - TrStmt(ss, indent + IndentAmount); + copyInstrWriter.Clear(); + TextWriter wr = TrStmt(ss, indent + IndentAmount); + // write out any copy instructions that copies the out param + // used in letexpr to a local + string copyInstr = copyInstrWriter.ToString(); + if (copyInstr != "") { + Indent(indent + IndentAmount, writer); + writer.Write(copyInstrWriter.ToString()); + } + writer.Write(wr.ToString()); if (ss.Labels != null) { - Indent(indent); // labels are not indented as much as the statements - wr.WriteLine("after_{0}: ;", ss.Labels.Data.AssignUniqueId("after_", idGenerator)); + Indent(indent, writer); // labels are not indented as much as the statements + writer.WriteLine("after_{0}: ;", ss.Labels.Data.AssignUniqueId("after_", idGenerator)); } } } - void TrLocalVar(LocalVariable s, bool alwaysInitialize, int indent) { - Contract.Requires(s != null); - if (s.IsGhost) { + void TrLocalVar(IVariable v, bool alwaysInitialize, int indent, TextWriter wr) { + Contract.Requires(v != null); + if (v.IsGhost) { // only emit non-ghosts (we get here only for local 
variables introduced implicitly by call statements) return; } - Indent(indent); - wr.Write("{0} @{1}", TypeName(s.Type), s.CompileName); + Indent(indent, wr); + wr.Write("{0} @{1}", TypeName(v.Type, wr), v.CompileName); if (alwaysInitialize) { // produce a default value - wr.WriteLine(" = {0};", DefaultValue(s.Type)); + wr.WriteLine(" = {0};", DefaultValue(v.Type, wr)); } else { wr.WriteLine(";"); } } - void MatchCasePrelude(string source, UserDefinedType sourceType, DatatypeCtor ctor, List/*!*/ arguments, int caseIndex, int caseCount, int indent) { + void MatchCasePrelude(string source, UserDefinedType sourceType, DatatypeCtor ctor, List/*!*/ arguments, int caseIndex, int caseCount, int indent, TextWriter wr) { Contract.Requires(source != null); Contract.Requires(sourceType != null); Contract.Requires(ctor != null); @@ -1994,7 +2276,7 @@ // if (source.is_Ctor0) { // FormalType f0 = ((Dt_Ctor0)source._D).a0; // ... - Indent(indent); + Indent(indent, wr); wr.Write("{0}if (", caseIndex == 0 ? "" : "} else "); if (caseIndex == caseCount - 1) { wr.Write("true"); @@ -2009,9 +2291,9 @@ if (!arg.IsGhost) { BoundVar bv = arguments[m]; // FormalType f0 = ((Dt_Ctor0)source._D).a0; - Indent(indent + IndentAmount); + Indent(indent + IndentAmount, wr); wr.WriteLine("{0} @{1} = (({2}){3}._D).@{4};", - TypeName(bv.Type), bv.CompileName, DtCtorName(ctor, sourceType.TypeArgs), source, FormalName(arg, k)); + TypeName(bv.Type, wr), bv.CompileName, DtCtorName(ctor, sourceType.TypeArgs, wr), source, FormalName(arg, k)); k++; } } @@ -2022,51 +2304,51 @@ /// /// Before calling TrParenExpr(expr), the caller must have spilled the let variables declared in "expr". 
/// - void TrParenExpr(string prefix, Expression expr) { + void TrParenExpr(string prefix, Expression expr, TextWriter wr, bool inLetExprBody) { Contract.Requires(prefix != null); Contract.Requires(expr != null); wr.Write(prefix); - TrParenExpr(expr); + TrParenExpr(expr, wr, inLetExprBody); } /// /// Before calling TrParenExpr(expr), the caller must have spilled the let variables declared in "expr". /// - void TrParenExpr(Expression expr) { + void TrParenExpr(Expression expr, TextWriter wr, bool inLetExprBody) { Contract.Requires(expr != null); wr.Write("("); - TrExpr(expr); + TrExpr(expr, wr, inLetExprBody); wr.Write(")"); } /// /// Before calling TrExprList(exprs), the caller must have spilled the let variables declared in expressions in "exprs". /// - void TrExprList(List/*!*/ exprs) { + void TrExprList(List/*!*/ exprs, TextWriter wr, bool inLetExprBody) { Contract.Requires(cce.NonNullElements(exprs)); wr.Write("("); string sep = ""; foreach (Expression e in exprs) { wr.Write(sep); - TrExpr(e); + TrExpr(e, wr, inLetExprBody); sep = ", "; } wr.Write(")"); } - void TrExprPairList(List/*!*/ exprs) { + void TrExprPairList(List/*!*/ exprs, TextWriter wr, bool inLetExprBody) { Contract.Requires(cce.NonNullElements(exprs)); wr.Write("("); string sep = ""; foreach (ExpressionPair p in exprs) { wr.Write(sep); wr.Write("new Dafny.Pair<"); - wr.Write(TypeName(p.A.Type)); + wr.Write(TypeName(p.A.Type, wr)); wr.Write(","); - wr.Write(TypeName(p.B.Type)); + wr.Write(TypeName(p.B.Type, wr)); wr.Write(">("); - TrExpr(p.A); + TrExpr(p.A, wr, inLetExprBody); wr.Write(","); - TrExpr(p.B); + TrExpr(p.B, wr, inLetExprBody); wr.Write(")"); sep = ", "; } @@ -2076,13 +2358,15 @@ /// /// Before calling TrExpr(expr), the caller must have spilled the let variables declared in "expr". 
/// - void TrExpr(Expression expr) + void TrExpr(Expression expr, TextWriter wr, bool inLetExprBody) { Contract.Requires(expr != null); if (expr is LiteralExpr) { LiteralExpr e = (LiteralExpr)expr; - if (e.Value == null) { - wr.Write("({0})null", TypeName(e.Type)); + if (e is StaticReceiverExpr) { + wr.Write(TypeName(e.Type, wr)); + } else if (e.Value == null) { + wr.Write("({0})null", TypeName(e.Type, wr)); } else if (e.Value is bool) { wr.Write((bool)e.Value ? "true" : "false"); } else if (e is CharLiteralExpr) { @@ -2123,41 +2407,49 @@ } else if (expr is IdentifierExpr) { var e = (IdentifierExpr)expr; - wr.Write("@" + e.Var.CompileName); - + if (e.Var is Formal && inLetExprBody && !((Formal)e.Var).InParam) { + // out param in letExpr body, need to copy it to a temp since + // letExpr body is translated to an anonymous function that doesn't + // allow out parameters + var name = string.Format("_pat_let_tv{0}", GetUniqueAstNumber(e)); + wr.Write("@" + name); + copyInstrWriter.Append("var @" + name + "= @" + e.Var.CompileName + ";\n"); + } else { + wr.Write("@" + e.Var.CompileName); + } } else if (expr is SetDisplayExpr) { var e = (SetDisplayExpr)expr; var elType = e.Type.AsSetType.Arg; - wr.Write("{0}<{1}>.FromElements", DafnySetClass, TypeName(elType)); - TrExprList(e.Elements); + wr.Write("{0}<{1}>.FromElements", DafnySetClass, TypeName(elType, wr)); + TrExprList(e.Elements, wr, inLetExprBody); } else if (expr is MultiSetDisplayExpr) { var e = (MultiSetDisplayExpr)expr; var elType = e.Type.AsMultiSetType.Arg; - wr.Write("{0}<{1}>.FromElements", DafnyMultiSetClass, TypeName(elType)); - TrExprList(e.Elements); + wr.Write("{0}<{1}>.FromElements", DafnyMultiSetClass, TypeName(elType, wr)); + TrExprList(e.Elements, wr, inLetExprBody); } else if (expr is SeqDisplayExpr) { var e = (SeqDisplayExpr)expr; var elType = e.Type.AsSeqType.Arg; - wr.Write("{0}<{1}>.FromElements", DafnySeqClass, TypeName(elType)); - TrExprList(e.Elements); + wr.Write("{0}<{1}>.FromElements", 
DafnySeqClass, TypeName(elType, wr)); + TrExprList(e.Elements, wr, inLetExprBody); } else if (expr is MapDisplayExpr) { MapDisplayExpr e = (MapDisplayExpr)expr; - wr.Write("{0}.FromElements", TypeName(e.Type)); - TrExprPairList(e.Elements); + wr.Write("{0}.FromElements", TypeName(e.Type, wr)); + TrExprPairList(e.Elements, wr, inLetExprBody); } else if (expr is MemberSelectExpr) { MemberSelectExpr e = (MemberSelectExpr)expr; SpecialField sf = e.Member as SpecialField; if (sf != null) { wr.Write(sf.PreString); - TrParenExpr(e.Obj); + TrParenExpr(e.Obj, wr, inLetExprBody); wr.Write(".@{0}", sf.CompiledName); wr.Write(sf.PostString); } else { - TrParenExpr(e.Obj); + TrExpr(e.Obj, wr, inLetExprBody); wr.Write(".@{0}", e.Member.CompileName); } @@ -2167,50 +2459,50 @@ if (e.Seq.Type.IsArrayType) { if (e.SelectOne) { Contract.Assert(e.E0 != null && e.E1 == null); - TrParenExpr(e.Seq); + TrParenExpr(e.Seq, wr, inLetExprBody); wr.Write("[(int)"); - TrParenExpr(e.E0); + TrParenExpr(e.E0, wr, inLetExprBody); wr.Write("]"); } else { - TrParenExpr("Dafny.Helpers.SeqFromArray", e.Seq); + TrParenExpr("Dafny.Helpers.SeqFromArray", e.Seq, wr, inLetExprBody); if (e.E1 != null) { - TrParenExpr(".Take", e.E1); + TrParenExpr(".Take", e.E1, wr, inLetExprBody); } if (e.E0 != null) { - TrParenExpr(".Drop", e.E0); + TrParenExpr(".Drop", e.E0, wr, inLetExprBody); } } } else if (e.SelectOne) { Contract.Assert(e.E0 != null && e.E1 == null); - TrParenExpr(e.Seq); - TrParenExpr(".Select", e.E0); + TrParenExpr(e.Seq, wr, inLetExprBody); + TrParenExpr(".Select", e.E0, wr, inLetExprBody); } else { - TrParenExpr(e.Seq); + TrParenExpr(e.Seq, wr, inLetExprBody); if (e.E1 != null) { - TrParenExpr(".Take", e.E1); + TrParenExpr(".Take", e.E1, wr, inLetExprBody); } if (e.E0 != null) { - TrParenExpr(".Drop", e.E0); + TrParenExpr(".Drop", e.E0, wr, inLetExprBody); } } } else if (expr is MultiSetFormingExpr) { var e = (MultiSetFormingExpr)expr; - wr.Write("{0}<{1}>", DafnyMultiSetClass, 
TypeName(e.E.Type.AsCollectionType.Arg)); + wr.Write("{0}<{1}>", DafnyMultiSetClass, TypeName(e.E.Type.AsCollectionType.Arg, wr)); var eeType = e.E.Type.NormalizeExpand(); if (eeType is SeqType) { - TrParenExpr(".FromSeq", e.E); + TrParenExpr(".FromSeq", e.E, wr, inLetExprBody); } else if (eeType is SetType) { - TrParenExpr(".FromSet", e.E); + TrParenExpr(".FromSet", e.E, wr, inLetExprBody); } else { Contract.Assert(false); throw new cce.UnreachableException(); } } else if (expr is MultiSelectExpr) { MultiSelectExpr e = (MultiSelectExpr)expr; - TrParenExpr(e.Array); + TrParenExpr(e.Array, wr, inLetExprBody); string prefix = "["; foreach (Expression idx in e.Indices) { wr.Write("{0}(int)", prefix); - TrParenExpr(idx); + TrParenExpr(idx, wr, inLetExprBody); prefix = ", "; } wr.Write("]"); @@ -2219,47 +2511,46 @@ SeqUpdateExpr e = (SeqUpdateExpr)expr; if (e.ResolvedUpdateExpr != null) { - TrExpr(e.ResolvedUpdateExpr); + TrExpr(e.ResolvedUpdateExpr, wr, inLetExprBody); } else { - TrParenExpr(e.Seq); + TrParenExpr(e.Seq, wr, inLetExprBody); wr.Write(".Update("); - TrExpr(e.Index); + TrExpr(e.Index, wr, inLetExprBody); wr.Write(", "); - TrExpr(e.Value); + TrExpr(e.Value, wr, inLetExprBody); wr.Write(")"); } } else if (expr is FunctionCallExpr) { FunctionCallExpr e = (FunctionCallExpr)expr; - CompileFunctionCallExpr(e, wr, TrExpr); + CompileFunctionCallExpr(e, wr, wr, inLetExprBody, TrExpr); } else if (expr is ApplyExpr) { var e = expr as ApplyExpr; wr.Write("Dafny.Helpers.Id<"); - wr.Write(TypeName(e.Function.Type)); + wr.Write(TypeName(e.Function.Type, wr)); wr.Write(">("); - TrExpr(e.Function); + TrExpr(e.Function, wr, inLetExprBody); wr.Write(")"); - TrExprList(e.Args); + TrExprList(e.Args, wr, inLetExprBody); } else if (expr is DatatypeValue) { DatatypeValue dtv = (DatatypeValue)expr; Contract.Assert(dtv.Ctor != null); // since dtv has been successfully resolved - var typeParams = dtv.InferredTypeArgs.Count == 0 ? 
"" : string.Format("<{0}>", TypeNames(dtv.InferredTypeArgs)); - - wr.Write("new {0}{1}(", DtName(dtv.Ctor.EnclosingDatatype), typeParams); + var typeParams = dtv.InferredTypeArgs.Count == 0 ? "" : string.Format("<{0}>", TypeNames(dtv.InferredTypeArgs, wr)); + wr.Write("new @{0}{1}(", DtName(dtv.Ctor.EnclosingDatatype), typeParams); if (!dtv.IsCoCall) { // For an ordinary constructor (that is, one that does not guard any co-recursive calls), generate: // new Dt_Cons( args ) - wr.Write("new {0}(", DtCtorName(dtv.Ctor, dtv.InferredTypeArgs)); + wr.Write("new {0}(", DtCtorName(dtv.Ctor, dtv.InferredTypeArgs, wr)); string sep = ""; for (int i = 0; i < dtv.Arguments.Count; i++) { Formal formal = dtv.Ctor.Formals[i]; if (!formal.IsGhost) { wr.Write(sep); - TrExpr(dtv.Arguments[i]); + TrExpr(dtv.Arguments[i], wr, inLetExprBody); sep = ", "; } } @@ -2288,17 +2579,17 @@ arg = varName; wr.Write("var {0} = ", varName); - TrExpr(actual); + TrExpr(actual, wr, inLetExprBody); wr.Write("; "); } else { var sw = new StringWriter(); - CompileFunctionCallExpr(fce, sw, (exp) => { + CompileFunctionCallExpr(fce, sw, wr, inLetExprBody, (exp, wrr, inLetExpr) => { string varName = idGenerator.FreshId("_ac"); sw.Write(varName); - wr.Write("var {0} = ", varName); - TrExpr(exp); - wr.Write("; "); + wrr.Write("var {0} = ", varName); + TrExpr(exp, wrr, inLetExpr); + wrr.Write("; "); }); arg = sw.ToString(); @@ -2310,7 +2601,7 @@ wr.Write("return () => { return "); - wr.Write("new {0}({1}", DtCtorName(dtv.Ctor, dtv.InferredTypeArgs), args); + wr.Write("new {0}({1}", DtCtorName(dtv.Ctor, dtv.InferredTypeArgs, wr), args); wr.Write("); }; })())"); } wr.Write(")"); @@ -2323,11 +2614,11 @@ switch (e.Op) { case UnaryOpExpr.Opcode.Not: wr.Write("!"); - TrParenExpr(e.E); + TrParenExpr(e.E, wr, inLetExprBody); break; case UnaryOpExpr.Opcode.Cardinality: wr.Write("new BigInteger("); - TrParenExpr(e.E); + TrParenExpr(e.E, wr, inLetExprBody); wr.Write(".Length)"); break; default: @@ -2345,7 +2636,7 @@ if 
(AsNativeType(e.E.Type) != null) { wr.Write("new BigInteger"); } - TrParenExpr(e.E); + TrParenExpr(e.E, wr, inLetExprBody); }; Action toIntCast = () => { Contract.Assert(toInt); @@ -2361,7 +2652,7 @@ } else if (!fromInt && toInt) { // real -> int toIntCast(); - TrParenExpr(e.E); + TrParenExpr(e.E, wr, inLetExprBody); wr.Write(".ToBigInteger()"); } else if (AsNativeType(e.ToType) != null) { toIntCast(); @@ -2373,14 +2664,14 @@ wr.Write("(" + (BigInteger)lit.Value + AsNativeType(e.ToType).Suffix + ")"); } else if ((u != null && u.Op == UnaryOpExpr.Opcode.Cardinality) || (m != null && m.MemberName == "Length" && m.Obj.Type.IsArrayType)) { // Optimize .Length to avoid intermediate BigInteger - TrParenExpr((u != null) ? u.E : m.Obj); + TrParenExpr((u != null) ? u.E : m.Obj, wr, inLetExprBody); if (AsNativeType(e.ToType).UpperBound <= new BigInteger(0x80000000U)) { wr.Write(".Length"); } else { wr.Write(".LongLength"); } } else { - TrParenExpr(e.E); + TrParenExpr(e.E, wr, inLetExprBody); } } else if (e.ToType.IsIntegerType && AsNativeType(e.E.Type) != null) { fromIntAsBigInteger(); @@ -2388,7 +2679,7 @@ Contract.Assert(fromInt == toInt); Contract.Assert(AsNativeType(e.ToType) == null); Contract.Assert(AsNativeType(e.E.Type) == null); - TrParenExpr(e.E); + TrParenExpr(e.E, wr, inLetExprBody); } } else if (expr is BinaryExpr) { @@ -2408,27 +2699,27 @@ opString = "&&"; break; case BinaryExpr.ResolvedOpcode.EqCommon: { - if (e.E0.Type.IsDatatype || e.E0.Type.IsTypeParameter || e.E0.Type.SupportsEquality) { - callString = "Equals"; - } else if (e.E0.Type.IsRefType) { + if (e.E0.Type.IsRefType) { // Dafny's type rules are slightly different C#, so we may need a cast here. // For example, Dafny allows x==y if x:array and y:array and T is some // type parameter. 
opString = "== (object)"; + } else if (e.E0.Type.IsDatatype || e.E0.Type.IsTypeParameter || e.E0.Type.SupportsEquality) { + callString = "Equals"; } else { opString = "=="; } break; } case BinaryExpr.ResolvedOpcode.NeqCommon: { - if (e.E0.Type.IsDatatype || e.E0.Type.IsTypeParameter || e.E0.Type.SupportsEquality) { - preOpString = "!"; - callString = "Equals"; - } else if (e.E0.Type.IsRefType) { + if (e.E0.Type.IsRefType) { // Dafny's type rules are slightly different C#, so we may need a cast here. // For example, Dafny allows x==y if x:array and y:array and T is some // type parameter. opString = "!= (object)"; + } else if (e.E0.Type.IsDatatype || e.E0.Type.IsTypeParameter || e.E0.Type.SupportsEquality) { + preOpString = "!"; + callString = "Equals"; } else { opString = "!="; } @@ -2457,9 +2748,9 @@ if (expr.Type.IsIntegerType || (AsNativeType(expr.Type) != null && AsNativeType(expr.Type).LowerBound < BigInteger.Zero)) { string suffix = AsNativeType(expr.Type) != null ? ("_" + AsNativeType(expr.Type).Name) : ""; wr.Write("Dafny.Helpers.EuclideanDivision" + suffix + "("); - TrParenExpr(e.E0); + TrParenExpr(e.E0, wr, inLetExprBody); wr.Write(", "); - TrExpr(e.E1); + TrExpr(e.E1, wr, inLetExprBody); wr.Write(")"); } else { opString = "/"; // for reals @@ -2469,9 +2760,9 @@ if (expr.Type.IsIntegerType || (AsNativeType(expr.Type) != null && AsNativeType(expr.Type).LowerBound < BigInteger.Zero)) { string suffix = AsNativeType(expr.Type) != null ? 
("_" + AsNativeType(expr.Type).Name) : ""; wr.Write("Dafny.Helpers.EuclideanModulus" + suffix + "("); - TrParenExpr(e.E0); + TrParenExpr(e.E0, wr, inLetExprBody); wr.Write(", "); - TrExpr(e.E1); + TrExpr(e.E1, wr, inLetExprBody); wr.Write(")"); } else { opString = "%"; // for reals @@ -2506,18 +2797,18 @@ case BinaryExpr.ResolvedOpcode.InSet: case BinaryExpr.ResolvedOpcode.InMultiSet: case BinaryExpr.ResolvedOpcode.InMap: - TrParenExpr(e.E1); + TrParenExpr(e.E1, wr, inLetExprBody); wr.Write(".Contains("); - TrExpr(e.E0); + TrExpr(e.E0, wr, inLetExprBody); wr.Write(")"); break; case BinaryExpr.ResolvedOpcode.NotInSet: case BinaryExpr.ResolvedOpcode.NotInMultiSet: case BinaryExpr.ResolvedOpcode.NotInMap: wr.Write("!"); - TrParenExpr(e.E1); + TrParenExpr(e.E1, wr, inLetExprBody); wr.Write(".Contains("); - TrExpr(e.E0); + TrExpr(e.E0, wr, inLetExprBody); wr.Write(")"); break; case BinaryExpr.ResolvedOpcode.Union: @@ -2537,16 +2828,16 @@ case BinaryExpr.ResolvedOpcode.Concat: callString = "Concat"; break; case BinaryExpr.ResolvedOpcode.InSeq: - TrParenExpr(e.E1); + TrParenExpr(e.E1, wr, inLetExprBody); wr.Write(".Contains("); - TrExpr(e.E0); + TrExpr(e.E0, wr, inLetExprBody); wr.Write(")"); break; case BinaryExpr.ResolvedOpcode.NotInSeq: wr.Write("!"); - TrParenExpr(e.E1); + TrParenExpr(e.E1, wr, inLetExprBody); wr.Write(".Contains("); - TrExpr(e.E0); + TrExpr(e.E0, wr, inLetExprBody); wr.Write(")"); break; @@ -2560,17 +2851,17 @@ wr.Write("(" + nativeType.Name + ")("); } wr.Write(preOpString); - TrParenExpr(e.E0); + TrParenExpr(e.E0, wr, inLetExprBody); wr.Write(" {0} ", opString); - TrParenExpr(e.E1); + TrParenExpr(e.E1, wr, inLetExprBody); if (needsCast) { wr.Write(")"); } } else if (callString != null) { wr.Write(preOpString); - TrParenExpr(e.E0); + TrParenExpr(e.E0, wr, inLetExprBody); wr.Write(".@{0}(", callString); - TrExpr(e.E1); + TrExpr(e.E1, wr, inLetExprBody); wr.Write(")"); } @@ -2593,17 +2884,18 @@ if (Contract.Exists(lhs.Vars, bv => !bv.IsGhost)) { var 
rhsName = string.Format("_pat_let{0}_{1}", GetUniqueAstNumber(e), i); wr.Write("Dafny.Helpers.Let<"); - wr.Write(TypeName(e.RHSs[i].Type) + "," + TypeName(e.Body.Type)); + wr.Write(TypeName(e.RHSs[i].Type, wr) + "," + TypeName(e.Body.Type, wr)); wr.Write(">("); - TrExpr(e.RHSs[i]); + TrExpr(e.RHSs[i], wr, inLetExprBody); wr.Write(", " + rhsName + " => "); neededCloseParens++; - var c = TrCasePattern(lhs, rhsName, e.Body.Type); + var c = TrCasePattern(lhs, rhsName, e.Body.Type, wr); Contract.Assert(c != 0); // we already checked that there's at least one non-ghost neededCloseParens += c; } } - TrExpr(e.Body); + + TrExpr(e.Body, wr, true); for (int i = 0; i < neededCloseParens; i++) { wr.Write(")"); } @@ -2612,7 +2904,7 @@ // ghost var x,y :| Constraint; E // is compiled just like E is, because the resolver has already checked that x,y (or other ghost variables, for that matter) don't // occur in E (moreover, the verifier has checked that values for x,y satisfying Constraint exist). - TrExpr(e.Body); + TrExpr(e.Body, wr, inLetExprBody); } else { // The Dafny "let" expression // var x,y :| Constraint; E @@ -2627,17 +2919,17 @@ Contract.Assert(e.RHSs.Count == 1); // checked by resolution if (e.Constraint_MissingBounds != null) { foreach (var bv in e.Constraint_MissingBounds) { - Error("this let-such-that expression is too advanced for the current compiler; Dafny's heuristics cannot find any bound for variable '{0}' (line {1})", bv.Name, e.tok.line); + Error("this let-such-that expression is too advanced for the current compiler; Dafny's heuristics cannot find any bound for variable '{0}' (line {1})", wr, bv.Name, e.tok.line); } } else { - wr.Write("Dafny.Helpers.Let(0, _let_dummy_" + GetUniqueAstNumber(e) + " => {"); + wr.Write("Dafny.Helpers.Let(0, _let_dummy_" + GetUniqueAstNumber(e) + " => {"); foreach (var bv in e.BoundVars) { - wr.Write("{0} @{1}", TypeName(bv.Type), bv.CompileName); - wr.WriteLine(" = {0};", DefaultValue(bv.Type)); + wr.Write("{0} @{1}", 
TypeName(bv.Type, wr), bv.CompileName); + wr.WriteLine(" = {0};", DefaultValue(bv.Type, wr)); } - TrAssignSuchThat(0, new List(e.BoundVars).ConvertAll(bv => (IVariable)bv), e.RHSs[0], e.Constraint_Bounds, e.tok.line); + TrAssignSuchThat(0, new List(e.BoundVars).ConvertAll(bv => (IVariable)bv), e.RHSs[0], e.Constraint_Bounds, e.tok.line, wr, inLetExprBody); wr.Write(" return "); - TrExpr(e.Body); + TrExpr(e.Body, wr, true); wr.Write("; })"); } } @@ -2657,7 +2949,7 @@ // }(src) string source = idGenerator.FreshId("_source"); - wr.Write("new Dafny.Helpers.Function<{0}, {1}>(delegate ({0} {2}) {{ ", TypeName(e.Source.Type), TypeName(e.Type), source); + wr.Write("new Dafny.Helpers.Function<{0}, {1}>(delegate ({0} {2}) {{ ", TypeName(e.Source.Type, wr), TypeName(e.Type, wr), source); if (e.Cases.Count == 0) { // the verifier would have proved we never get here; still, we need some code that will compile @@ -2666,9 +2958,9 @@ int i = 0; var sourceType = (UserDefinedType)e.Source.Type.NormalizeExpand(); foreach (MatchCaseExpr mc in e.Cases) { - MatchCasePrelude(source, sourceType, cce.NonNull(mc.Ctor), mc.Arguments, i, e.Cases.Count, 0); + MatchCasePrelude(source, sourceType, cce.NonNull(mc.Ctor), mc.Arguments, i, e.Cases.Count, 0, wr); wr.Write("return "); - TrExpr(mc.Body); + TrExpr(mc.Body, wr, inLetExprBody); wr.Write("; "); i++; } @@ -2676,11 +2968,14 @@ } // We end with applying the source expression to the delegate we just built wr.Write("})("); - TrExpr(e.Source); + TrExpr(e.Source, wr, inLetExprBody); wr.Write(")"); } else if (expr is QuantifierExpr) { var e = (QuantifierExpr)expr; + + // Compilation does not check whether a quantifier was split. 
+ Contract.Assert(e.Bounds != null); // for non-ghost quantifiers, the resolver would have insisted on finding bounds var n = e.BoundVars.Count; Contract.Assert(e.Bounds.Count == n); @@ -2690,27 +2985,29 @@ // emit: Dafny.Helpers.QuantX(boundsInformation, isForall, bv => body) if (bound is ComprehensionExpr.BoolBoundedPool) { wr.Write("Dafny.Helpers.QuantBool("); + } else if (bound is ComprehensionExpr.CharBoundedPool) { + wr.Write("Dafny.Helpers.QuantChar("); } else if (bound is ComprehensionExpr.IntBoundedPool) { var b = (ComprehensionExpr.IntBoundedPool)bound; wr.Write("Dafny.Helpers.QuantInt("); - TrExpr(b.LowerBound); + TrExpr(b.LowerBound, wr, inLetExprBody); wr.Write(", "); - TrExpr(b.UpperBound); + TrExpr(b.UpperBound, wr, inLetExprBody); wr.Write(", "); } else if (bound is ComprehensionExpr.SetBoundedPool) { var b = (ComprehensionExpr.SetBoundedPool)bound; wr.Write("Dafny.Helpers.QuantSet("); - TrExpr(b.Set); + TrExpr(b.Set, wr, inLetExprBody); wr.Write(", "); } else if (bound is ComprehensionExpr.MapBoundedPool) { var b = (ComprehensionExpr.MapBoundedPool)bound; wr.Write("Dafny.Helpers.QuantMap("); - TrExpr(b.Map); + TrExpr(b.Map, wr, inLetExprBody); wr.Write(", "); } else if (bound is ComprehensionExpr.SeqBoundedPool) { var b = (ComprehensionExpr.SeqBoundedPool)bound; wr.Write("Dafny.Helpers.QuantSeq("); - TrExpr(b.Seq); + TrExpr(b.Seq, wr, inLetExprBody); wr.Write(", "); } else if (bound is ComprehensionExpr.DatatypeBoundedPool) { var b = (ComprehensionExpr.DatatypeBoundedPool)bound; @@ -2723,7 +3020,7 @@ wr.Write("{0}, ", expr is ForallExpr ? 
"true" : "false"); wr.Write("@{0} => ", bv.CompileName); } - TrExpr(e.LogicalBody()); + TrExpr(e.LogicalBody(true), wr, inLetExprBody); for (int i = 0; i < n; i++) { wr.Write(")"); } @@ -2747,9 +3044,10 @@ // return Dafny.Set.FromCollection(_coll); // })() Contract.Assert(e.Bounds != null); // the resolver would have insisted on finding bounds - var typeName = TypeName(e.Type.AsSetType.Arg); + var typeName = TypeName(e.Type.AsSetType.Arg, wr); + var collection_name = idGenerator.FreshId("_coll"); wr.Write("((Dafny.Helpers.ComprehensionDelegate<{0}>)delegate() {{ ", typeName); - wr.Write("var _coll = new System.Collections.Generic.List<{0}>(); ", typeName); + wr.Write("var {0} = new System.Collections.Generic.List<{1}>(); ", collection_name, typeName); var n = e.BoundVars.Count; Contract.Assert(e.Bounds.Count == n); for (int i = 0; i < n; i++) { @@ -2757,44 +3055,51 @@ var bv = e.BoundVars[i]; if (bound is ComprehensionExpr.BoolBoundedPool) { wr.Write("foreach (var @{0} in Dafny.Helpers.AllBooleans) {{ ", bv.CompileName); + } else if (bound is ComprehensionExpr.CharBoundedPool) { + wr.Write("foreach (var @{0} in Dafny.Helpers.AllChars) {{ ", bv.CompileName); } else if (bound is ComprehensionExpr.IntBoundedPool) { var b = (ComprehensionExpr.IntBoundedPool)bound; - wr.Write("for (var @{0} = ", bv.CompileName); - TrExpr(b.LowerBound); - wr.Write("; @{0} < ", bv.CompileName); - TrExpr(b.UpperBound); - wr.Write("; @{0}++) {{ ", bv.CompileName); + if (AsNativeType(bv.Type) != null) { + wr.Write("foreach (var @{0} in @{1}.IntegerRange(", bv.CompileName, bv.Type.AsNewtype.FullCompileName); + } else { + wr.Write("foreach (var @{0} in Dafny.Helpers.IntegerRange(", bv.CompileName); + } + TrExpr(b.LowerBound, wr, inLetExprBody); + wr.Write(", "); + TrExpr(b.UpperBound, wr, inLetExprBody); + wr.Write(")) { "); } else if (bound is ComprehensionExpr.SetBoundedPool) { var b = (ComprehensionExpr.SetBoundedPool)bound; wr.Write("foreach (var @{0} in (", bv.CompileName); - 
TrExpr(b.Set); + TrExpr(b.Set, wr, inLetExprBody); wr.Write(").Elements) { "); } else if (bound is ComprehensionExpr.MapBoundedPool) { var b = (ComprehensionExpr.MapBoundedPool)bound; wr.Write("foreach (var @{0} in (", bv.CompileName); - TrExpr(b.Map); + TrExpr(b.Map, wr, inLetExprBody); wr.Write(").Domain) { "); } else if (bound is ComprehensionExpr.SeqBoundedPool) { var b = (ComprehensionExpr.SeqBoundedPool)bound; wr.Write("foreach (var @{0} in (", bv.CompileName); - TrExpr(b.Seq); + TrExpr(b.Seq, wr, inLetExprBody); wr.Write(").Elements) { "); } else if (bound is ComprehensionExpr.DatatypeBoundedPool) { var b = (ComprehensionExpr.DatatypeBoundedPool)bound; - wr.Write("foreach (var @{0} in {1}.AllSingletonConstructors) {{", bv.CompileName, TypeName(bv.Type)); + wr.Write("foreach (var @{0} in {1}.AllSingletonConstructors) {{", bv.CompileName, TypeName(bv.Type, wr)); } else { Contract.Assert(false); throw new cce.UnreachableException(); // unexpected BoundedPool type } } wr.Write("if ("); - TrExpr(e.Range); - wr.Write(") { _coll.Add("); - TrExpr(e.Term); + TrExpr(e.Range, wr, inLetExprBody); + wr.Write(") {"); + wr.Write("{0}.Add(", collection_name); + TrExpr(e.Term, wr, inLetExprBody); wr.Write("); }"); for (int i = 0; i < n; i++) { wr.Write("}"); } - wr.Write("return Dafny.Set<{0}>.FromCollection(_coll); ", typeName); + wr.Write("return Dafny.Set<{0}>.FromCollection({1}); ", typeName, collection_name); wr.Write("})()"); } else if (expr is MapComprehension) { @@ -2816,58 +3121,63 @@ // return Dafny.Map.FromElements(_coll); // })() Contract.Assert(e.Bounds != null); // the resolver would have insisted on finding bounds - var domtypeName = TypeName(e.Type.AsMapType.Domain); - var rantypeName = TypeName(e.Type.AsMapType.Range); + var domtypeName = TypeName(e.Type.AsMapType.Domain, wr); + var rantypeName = TypeName(e.Type.AsMapType.Range, wr); + var collection_name = idGenerator.FreshId("_coll"); wr.Write("((Dafny.Helpers.MapComprehensionDelegate<{0},{1}>)delegate() 
{{ ", domtypeName, rantypeName); - wr.Write("var _coll = new System.Collections.Generic.List>(); ", domtypeName, rantypeName); + wr.Write("var {0} = new System.Collections.Generic.List>(); ", collection_name, domtypeName, rantypeName); var n = e.BoundVars.Count; Contract.Assert(e.Bounds.Count == n && n == 1); var bound = e.Bounds[0]; var bv = e.BoundVars[0]; if (bound is ComprehensionExpr.BoolBoundedPool) { wr.Write("foreach (var @{0} in Dafny.Helpers.AllBooleans) {{ ", bv.CompileName); + } else if (bound is ComprehensionExpr.CharBoundedPool) { + wr.Write("foreach (var @{0} in Dafny.Helpers.AllChars) {{ ", bv.CompileName); } else if (bound is ComprehensionExpr.IntBoundedPool) { var b = (ComprehensionExpr.IntBoundedPool)bound; - wr.Write("for (var @{0} = ", bv.CompileName); - TrExpr(b.LowerBound); - wr.Write("; @{0} < ", bv.CompileName); - TrExpr(b.UpperBound); - wr.Write("; @{0}++) {{ ", bv.CompileName); + if (AsNativeType(bv.Type) != null) { + wr.Write("foreach (var @{0} in @{1}.IntegerRange(", bv.CompileName, bv.Type.AsNewtype.FullCompileName); + } else { + wr.Write("foreach (var @{0} in Dafny.Helpers.IntegerRange(", bv.CompileName); + } + TrExpr(b.LowerBound, wr, inLetExprBody); + wr.Write(", "); + TrExpr(b.UpperBound, wr, inLetExprBody); + wr.Write(")) { "); } else if (bound is ComprehensionExpr.SetBoundedPool) { var b = (ComprehensionExpr.SetBoundedPool)bound; wr.Write("foreach (var @{0} in (", bv.CompileName); - TrExpr(b.Set); + TrExpr(b.Set, wr, inLetExprBody); wr.Write(").Elements) { "); } else if (bound is ComprehensionExpr.MapBoundedPool) { var b = (ComprehensionExpr.MapBoundedPool)bound; wr.Write("foreach (var @{0} in (", bv.CompileName); - TrExpr(b.Map); + TrExpr(b.Map, wr, inLetExprBody); wr.Write(").Domain) { "); } else if (bound is ComprehensionExpr.SeqBoundedPool) { var b = (ComprehensionExpr.SeqBoundedPool)bound; wr.Write("foreach (var @{0} in (", bv.CompileName); - TrExpr(b.Seq); + TrExpr(b.Seq, wr, inLetExprBody); wr.Write(").Elements) { "); } 
else { + // TODO: handle ComprehensionExpr.SubSetBoundedPool Contract.Assert(false); throw new cce.UnreachableException(); // unexpected BoundedPool type } wr.Write("if ("); - TrExpr(e.Range); + TrExpr(e.Range, wr, inLetExprBody); wr.Write(") { "); - wr.Write("_coll.Add(new Dafny.Pair<{0},{1}>(@{2},", domtypeName, rantypeName, bv.CompileName); - TrExpr(e.Term); + wr.Write("{0}.Add(new Dafny.Pair<{1},{2}>(@{3},", collection_name, domtypeName, rantypeName, bv.CompileName); + TrExpr(e.Term, wr, inLetExprBody); wr.Write(")); }"); wr.Write("}"); - wr.Write("return Dafny.Map<{0},{1}>.FromCollection(_coll); ", domtypeName, rantypeName); + wr.Write("return Dafny.Map<{0},{1}>.FromCollection({2}); ", domtypeName, rantypeName, collection_name); wr.Write("})()"); } else if (expr is LambdaExpr) { LambdaExpr e = (LambdaExpr)expr; - ISet fvs = new HashSet(); - bool dontCare = false; - Type dontCareT = null; - Translator.ComputeFreeVariables(expr, fvs, ref dontCare, ref dontCare, ref dontCareT, false); + var fvs = Translator.ComputeFreeVariables(expr); var sm = new Dictionary(); var bvars = new List(); @@ -2887,46 +3197,46 @@ var su = new Translator.Substituter(null, sm, new Dictionary(), null); - BetaRedex(bvars, fexprs, expr.Type, () => { + BetaRedex(bvars, fexprs, expr.Type, wr, inLetExprBody, () => { wr.Write("("); wr.Write(Util.Comma(e.BoundVars, bv => "@" + bv.CompileName)); wr.Write(") => "); - TrExpr(su.Substitute(e.Body)); + TrExpr(su.Substitute(e.Body), wr, inLetExprBody); }); } else if (expr is StmtExpr) { var e = (StmtExpr)expr; - TrExpr(e.E); + TrExpr(e.E, wr, inLetExprBody); } else if (expr is ITEExpr) { ITEExpr e = (ITEExpr)expr; wr.Write("("); - TrExpr(e.Test); + TrExpr(e.Test, wr, inLetExprBody); wr.Write(") ? 
("); - TrExpr(e.Thn); + TrExpr(e.Thn, wr, inLetExprBody); wr.Write(") : ("); - TrExpr(e.Els); + TrExpr(e.Els, wr, inLetExprBody); wr.Write(")"); } else if (expr is ConcreteSyntaxExpression) { var e = (ConcreteSyntaxExpression)expr; - TrExpr(e.ResolvedExpression); + TrExpr(e.ResolvedExpression, wr, inLetExprBody); } else if (expr is NamedExpr) { - TrExpr(((NamedExpr)expr).Body); + TrExpr(((NamedExpr)expr).Body, wr, inLetExprBody); } else { Contract.Assert(false); throw new cce.UnreachableException(); // unexpected expression } } - int TrCasePattern(CasePattern pat, string rhsString, Type bodyType) { + int TrCasePattern(CasePattern pat, string rhsString, Type bodyType, TextWriter wr) { Contract.Requires(pat != null); Contract.Requires(rhsString != null); int c = 0; if (pat.Var != null) { var bv = pat.Var; if (!bv.IsGhost) { - wr.Write("Dafny.Helpers.Let<" + TypeName(bv.Type) + "," + TypeName(bodyType) + ">"); + wr.Write("Dafny.Helpers.Let<" + TypeName(bv.Type, wr) + "," + TypeName(bodyType, wr) + ">"); wr.Write("(" + rhsString + ", @" + bv.CompileName + " => "); c++; } @@ -2942,7 +3252,7 @@ // nothing to compile, but do a sanity check Contract.Assert(!Contract.Exists(arg.Vars, bv => !bv.IsGhost)); } else { - c += TrCasePattern(arg, string.Format("(({0})({1})._D).@{2}", DtCtorName(ctor, ((DatatypeValue)pat.Expr).InferredTypeArgs), rhsString, FormalName(formal, k)), bodyType); + c += TrCasePattern(arg, string.Format("(({0})({1})._D).@{2}", DtCtorName(ctor, ((DatatypeValue)pat.Expr).InferredTypeArgs, wr), rhsString, FormalName(formal, k)), bodyType, wr); k++; } } @@ -2950,36 +3260,40 @@ return c; } - delegate void FCE_Arg_Translator(Expression e); + delegate void FCE_Arg_Translator(Expression e, TextWriter wr, bool inLetExpr=false); - void CompileFunctionCallExpr(FunctionCallExpr e, TextWriter twr, FCE_Arg_Translator tr) { + void CompileFunctionCallExpr(FunctionCallExpr e, TextWriter twr, TextWriter wr, bool inLetExprBody, FCE_Arg_Translator tr) { Function f = 
cce.NonNull(e.Function); if (f.IsStatic) { - twr.Write(TypeName_Companion(e.Receiver.Type)); + twr.Write(TypeName_Companion(e.Receiver.Type, wr)); } else { twr.Write("("); - tr(e.Receiver); + tr(e.Receiver, wr, inLetExprBody); twr.Write(")"); } twr.Write(".@{0}", f.CompileName); + if (f.TypeArgs.Count != 0) { + List typeArgs = f.TypeArgs.ConvertAll(ta => e.TypeArgumentSubstitutions[ta]); + twr.Write("<" + TypeNames(typeArgs, wr) + ">"); + } twr.Write("("); string sep = ""; for (int i = 0; i < e.Args.Count; i++) { if (!e.Function.Formals[i].IsGhost) { twr.Write(sep); - tr(e.Args[i]); + tr(e.Args[i], wr); sep = ", "; } } twr.Write(")"); } - void BetaRedex(List bvars, List exprs, Type bodyType, Action makeBody) { + void BetaRedex(List bvars, List exprs, Type bodyType, TextWriter wr, bool inLetExprBody, Action makeBody) { Contract.Requires(bvars != null); Contract.Requires(exprs != null); Contract.Requires(bvars.Count == exprs.Count); wr.Write("Dafny.Helpers.Id<"); - wr.Write(TypeName_UDT(ArrowType.Arrow_FullCompileName, Util.Snoc(bvars.ConvertAll(bv => bv.Type), bodyType))); + wr.Write(TypeName_UDT(ArrowType.Arrow_FullCompileName, Util.Snoc(bvars.ConvertAll(bv => bv.Type), bodyType), wr)); wr.Write(">(("); wr.Write(Util.Comma(bvars, bv => "@" + bv.CompileName)); wr.Write(") => "); @@ -2987,7 +3301,7 @@ makeBody(); wr.Write(")"); - TrExprList(exprs); + TrExprList(exprs, wr, inLetExprBody); } } diff -Nru dafny-1.9.5/Source/Dafny/DafnyAst.cs dafny-1.9.7/Source/Dafny/DafnyAst.cs --- dafny-1.9.5/Source/Dafny/DafnyAst.cs 2015-05-11 08:03:26.000000000 +0000 +++ dafny-1.9.7/Source/Dafny/DafnyAst.cs 2016-06-05 21:11:14.000000000 +0000 @@ -10,6 +10,7 @@ using System.Numerics; using System.Linq; using Microsoft.Boogie; +using System.Diagnostics; namespace Microsoft.Dafny { public class Program { @@ -26,20 +27,22 @@ public List CompileModules; // filled in during resolution. // Contains the definitions to be used for compilation. 
- List _additionalInformation = new List(); - public List AdditionalInformation { get { return _additionalInformation; } } public readonly ModuleDecl DefaultModule; public readonly ModuleDefinition DefaultModuleDef; public readonly BuiltIns BuiltIns; public readonly List TranslationTasks; - public Program(string name, [Captured] ModuleDecl module, [Captured] BuiltIns builtIns) { + public readonly ErrorReporter reporter; + + public Program(string name, [Captured] ModuleDecl module, [Captured] BuiltIns builtIns, ErrorReporter reporter) { Contract.Requires(name != null); Contract.Requires(module != null); Contract.Requires(module is LiteralModuleDecl); + Contract.Requires(reporter != null); FullName = name; DefaultModule = module; DefaultModuleDef = (DefaultModuleDecl)((LiteralModuleDecl)module).ModuleDef; BuiltIns = builtIns; + this.reporter = reporter; Modules = new List(); CompileModules = new List(); TranslationTasks = new List(); @@ -86,7 +89,7 @@ public class BuiltIns { - public readonly ModuleDefinition SystemModule = new ModuleDefinition(Token.NoToken, "_System", false, false, null, null, null, true); + public readonly ModuleDefinition SystemModule = new ModuleDefinition(Token.NoToken, "_System", false, false, /*isExclusiveRefinement:*/ false, null, null, null, true); readonly Dictionary arrayTypeDecls = new Dictionary(); readonly Dictionary arrowTypeDecls = new Dictionary(); readonly Dictionary tupleTypeDecls = new Dictionary(); @@ -165,7 +168,7 @@ var argExprs = args.ConvertAll(a => (Expression)new IdentifierExpr(tok, a.Name) { Var = a, Type = a.Type }); var readsIS = new FunctionCallExpr(tok, "reads", new ImplicitThisExpr(tok), tok, argExprs) { - Type = new SetType(new ObjectType()), + Type = new SetType(true, new ObjectType()), }; var readsFrame = new List { new FrameExpression(tok, readsIS, null) }; var req = new Function(tok, "requires", false, false, true, @@ -174,13 +177,13 @@ new Specification(new List(), null), null, null, null); var reads = new 
Function(tok, "reads", false, false, true, - new List(), args, new SetType(new ObjectType()), + new List(), args, new SetType(true, new ObjectType()), new List(), readsFrame, new List(), new Specification(new List(), null), null, null, null); readsIS.Function = reads; // just so we can really claim the member declarations are resolved readsIS.TypeArgumentSubstitutions = Util.Dict(tps, tys); // ditto - var arrowDecl = new ArrowTypeDecl(tps, req, reads, SystemModule, DontCompile()); + var arrowDecl = new ArrowTypeDecl(tps, req, reads, SystemModule, DontCompile(), null); arrowTypeDecls.Add(arity, arrowDecl); SystemModule.TopLevelDecls.Add(arrowDecl); } @@ -232,21 +235,12 @@ } public static IEnumerable SubExpressions(Attributes attrs) { - for (; attrs != null; attrs = attrs.Prev) { - foreach (var arg in attrs.Args) { - yield return arg; - } - } + return attrs.AsEnumerable().SelectMany(aa => attrs.Args); } public static bool Contains(Attributes attrs, string nm) { Contract.Requires(nm != null); - for (; attrs != null; attrs = attrs.Prev) { - if (attrs.Name == nm) { - return true; - } - } - return false; + return attrs.AsEnumerable().Any(aa => aa.Name == nm); } /// @@ -258,10 +252,10 @@ [Pure] public static bool ContainsBool(Attributes attrs, string nm, ref bool value) { Contract.Requires(nm != null); - for (; attrs != null; attrs = attrs.Prev) { - if (attrs.Name == nm) { - if (attrs.Args.Count == 1) { - var arg = attrs.Args[0] as LiteralExpr; + foreach (var attr in attrs.AsEnumerable()) { + if (attr.Name == nm) { + if (attr.Args.Count == 1) { + var arg = attr.Args[0] as LiteralExpr; if (arg != null && arg.Value is bool) { value = (bool)arg.Value; } @@ -306,12 +300,28 @@ /// public static List FindExpressions(Attributes attrs, string nm) { Contract.Requires(nm != null); + foreach (var attr in attrs.AsEnumerable()) { + if (attr.Name == nm) { + return attr.Args; + } + } + return null; + } + + + /// + /// Same as FindExpressions, but returns all matches + /// + public 
static List> FindAllExpressions(Attributes attrs, string nm) { + Contract.Requires(nm != null); + List> ret = null; for (; attrs != null; attrs = attrs.Prev) { if (attrs.Name == nm) { - return attrs.Args; + ret = ret ?? new List>(); // Avoid allocating the list in the common case where we don't find nm + ret.Add(attrs.Args); } } - return null; + return ret; } /// @@ -322,13 +332,13 @@ /// - if "allowed" contains Int and Args contains one BigInteger literal, return true and set value to the BigInteger literal. Otherwise, /// - if "allowed" contains String and Args contains one string literal, return true and set value to the string literal. Otherwise, /// - if "allowed" contains Expression and Args contains one element, return true and set value to the one element (of type Expression). Otherwise, - /// - return false, leave value unmodified, and call errorReporter with an error string. + /// - return false, leave value unmodified, and call reporter with an error string. /// public enum MatchingValueOption { Empty, Bool, Int, String, Expression } - public static bool ContainsMatchingValue(Attributes attrs, string nm, ref object value, IEnumerable allowed, Action errorReporter) { + public static bool ContainsMatchingValue(Attributes attrs, string nm, ref object value, IEnumerable allowed, Action reporter) { Contract.Requires(nm != null); Contract.Requires(allowed != null); - Contract.Requires(errorReporter != null); + Contract.Requires(reporter != null); List args = FindExpressions(attrs, nm); if (args == null) { return false; @@ -336,7 +346,7 @@ if (allowed.Contains(MatchingValueOption.Empty)) { return true; } else { - errorReporter("Attribute " + nm + " requires one argument"); + reporter("Attribute " + nm + " requires one argument"); return false; } } else if (args.Count == 1) { @@ -356,16 +366,25 @@ value = arg; return true; } else { - errorReporter("Attribute " + nm + " expects an argument in one of the following categories: " + String.Join(", ", allowed)); + 
reporter("Attribute " + nm + " expects an argument in one of the following categories: " + String.Join(", ", allowed)); return false; } } else { - errorReporter("Attribute " + nm + " cannot have more than one argument"); + reporter("Attribute " + nm + " cannot have more than one argument"); return false; } } } + internal static class AttributesExtensions { + public static IEnumerable AsEnumerable(this Attributes attr) { + while (attr != null) { + yield return attr; + attr = attr.Prev; + } + } + } + // ------------------------------------------------------------------------------------------------------ public abstract class Type { @@ -412,16 +431,33 @@ var pt = type as TypeProxy; if (pt != null && pt.T != null) { type = pt.T; - } else { + continue; + } var syn = type.AsTypeSynonym; if (syn != null) { var udt = (UserDefinedType)type; // correctness of cast follows from the AsTypeSynonym != null test. // Instantiate with the actual type arguments type = syn.RhsWithArgument(udt.TypeArgs); + continue; + } + if (DafnyOptions.O.IronDafny && type is UserDefinedType) { + var rc = ((UserDefinedType)type).ResolvedClass; + if (rc != null) { + while (rc.ClonedFrom != null || rc.ExclusiveRefinement != null) { + if (rc.ClonedFrom != null) { + rc = (TopLevelDecl)rc.ClonedFrom; } else { - return type; + Contract.Assert(rc.ExclusiveRefinement != null); + rc = rc.ExclusiveRefinement; + } + } + } + if (rc is TypeSynonymDecl) { + type = ((TypeSynonymDecl)rc).Rhs; + continue; } } + return type; } } @@ -490,6 +526,27 @@ } } + public bool HasFinitePossibleValues { + get { + if (IsBoolType || IsCharType || IsRefType) { + return true; + } + var st = AsSetType; + if (st != null && st.Arg.HasFinitePossibleValues) { + return true; + } + var mt = AsMapType; + if (mt != null && mt.Domain.HasFinitePossibleValues) { + return true; + } + var dt = AsDatatype; + if (dt != null && dt.HasFinitePossibleValues) { + return true; + } + return false; + } + } + public CollectionType AsCollectionType { get { 
return NormalizeExpand() as CollectionType; } } public SetType AsSetType { get { return NormalizeExpand() as SetType; } } public MultiSetType AsMultiSetType { get { return NormalizeExpand() as MultiSetType; } } @@ -535,6 +592,12 @@ return t != null && !t.Finite; } } + public bool IsISetType { + get { + var t = NormalizeExpand() as SetType; + return t != null && !t.Finite; + } + } public NewtypeDecl AsNewtype { get { var udt = NormalizeExpand() as UserDefinedType; @@ -633,7 +696,7 @@ /// public bool IsOrdered { get { - return !IsTypeParameter && !IsCoDatatype && !IsArrowType && !IsIMapType; + return !IsTypeParameter && !IsCoDatatype && !IsArrowType && !IsIMapType && !IsISetType; } } @@ -859,17 +922,25 @@ } public class SetType : CollectionType { - public SetType(Type arg) : base(arg) { + private bool finite; + + public bool Finite { + get { return finite; } + set { finite = value; } } - public override string CollectionTypeName { get { return "set"; } } + + public SetType(bool finite, Type arg) : base(arg) { + this.finite = finite; + } + public override string CollectionTypeName { get { return finite ? 
"set" : "iset"; } } [Pure] public override bool Equals(Type that) { var t = that.NormalizeExpand() as SetType; - return t != null && Arg.Equals(t.Arg); + return t != null && Finite == t.Finite && Arg.Equals(t.Arg); } public override bool PossiblyEquals_W(Type that) { var t = that as SetType; - return t != null && Arg.PossiblyEquals(t.Arg); + return t != null && Finite == t.Finite && Arg.PossiblyEquals(t.Arg); } } @@ -988,7 +1059,7 @@ public virtual string FullCompileName { get { if (ResolvedClass != null && !ResolvedClass.Module.IsDefaultModule) { - return ResolvedClass.Module.CompileName + ".@" + CompileName; + return ResolvedClass.Module.CompileName + ".@" + ResolvedClass.CompileName; } else { return CompileName; } @@ -997,7 +1068,11 @@ public string FullCompanionCompileName { get { Contract.Requires(ResolvedClass is TraitDecl); - var s = ResolvedClass.Module.IsDefaultModule ? "" : ResolvedClass.Module.CompileName + "."; + var m = ResolvedClass.Module; + while (DafnyOptions.O.IronDafny && m.ClonedFrom != null) { + m = m.ClonedFrom; + } + var s = m.IsDefaultModule ? "" : m.CompileName + "."; return s + "@_Companion_" + CompileName; } } @@ -1312,20 +1387,22 @@ /// /// This proxy stands for: - /// set(Arg) or multiset(Arg) or seq(Arg) or map(Arg, anyRange) or imap(Arg, anyRange) + /// set(Arg) or iset(Arg) or multiset(Arg) or seq(Arg) or map(Arg, anyRange) or imap(Arg, anyRange) /// public class CollectionTypeProxy : RestrictedTypeProxy { public readonly Type Arg; public readonly bool AllowIMap; + public readonly bool AllowISet; [ContractInvariantMethod] void ObjectInvariant() { Contract.Invariant(Arg != null); } - public CollectionTypeProxy(Type arg, bool allowIMap) { + public CollectionTypeProxy(Type arg, bool allowIMap, bool allowISet) { Contract.Requires(arg != null); Arg = arg; AllowIMap = allowIMap; + AllowISet = allowISet; } public override int OrderID { get { @@ -1338,6 +1415,7 @@ /// This proxy can stand for any numeric type. 
/// In addition, if AllowSeq, then it can stand for a seq. /// In addition, if AllowSetVarieties, it can stand for a set or multiset. + /// In addition, if AllowISet, then it can stand for a iset /// public class OperationTypeProxy : RestrictedTypeProxy { public readonly bool AllowInts; @@ -1345,13 +1423,14 @@ public readonly bool AllowChar; public readonly bool AllowSeq; public readonly bool AllowSetVarieties; + public readonly bool AllowISet; public bool JustInts { - get { return AllowInts && !AllowReals && !AllowChar && !AllowSeq && !AllowSetVarieties; } + get { return AllowInts && !AllowReals && !AllowChar && !AllowSeq && !AllowSetVarieties && !AllowISet; } } public bool JustReals { - get { return !AllowInts && AllowReals && !AllowChar && !AllowSeq && !AllowSetVarieties; } + get { return !AllowInts && AllowReals && !AllowChar && !AllowSeq && !AllowSetVarieties && !AllowISet; } } - public OperationTypeProxy(bool allowInts, bool allowReals, bool allowChar, bool allowSeq, bool allowSetVarieties) { + public OperationTypeProxy(bool allowInts, bool allowReals, bool allowChar, bool allowSeq, bool allowSetVarieties, bool allowISet) { Contract.Requires(allowInts || allowReals || allowChar || allowSeq || allowSetVarieties); // don't allow unsatisfiable constraint Contract.Requires(!(!allowInts && !allowReals && allowChar && !allowSeq && !allowSetVarieties)); // to constrain to just char, don't use a proxy AllowInts = allowInts; @@ -1359,6 +1438,7 @@ AllowChar = allowChar; AllowSeq = allowSeq; AllowSetVarieties = allowSetVarieties; + AllowISet = allowISet; } public override int OrderID { get { @@ -1451,22 +1531,41 @@ IToken INamedRegion.BodyEndTok { get { return BodyEndTok; } } string INamedRegion.Name { get { return Name; } } string compileName; + private readonly Declaration clonedFrom; + public virtual string CompileName { get { if (compileName == null) { - compileName = NonglobalVariable.CompilerizeName(Name); + object externValue = ""; + string errorMessage = ""; + 
bool isExternal = Attributes.ContainsMatchingValue(this.Attributes, "extern", ref externValue, + new Attributes.MatchingValueOption[] { Attributes.MatchingValueOption.String }, + err => errorMessage = err); + if (isExternal) { + compileName = (string)externValue; + } + else { + compileName = NonglobalVariable.CompilerizeName(Name); + } } return compileName; } } public Attributes Attributes; // readonly, except during class merging in the refinement transformations - public Declaration(IToken tok, string name, Attributes attributes) { + public Declaration(IToken tok, string name, Attributes attributes, Declaration clonedFrom) { Contract.Requires(tok != null); Contract.Requires(name != null); this.tok = tok; this.Name = name; this.Attributes = attributes; + this.clonedFrom = clonedFrom; + } + + public Declaration ClonedFrom { + get { + return this.clonedFrom; + } } [Pure] @@ -1478,12 +1577,10 @@ internal FreshIdGenerator IdGenerator = new FreshIdGenerator(); } - public class OpaqueType_AsParameter : TypeParameter - { + public class OpaqueType_AsParameter : TypeParameter { public readonly List TypeArgs; public OpaqueType_AsParameter(IToken tok, string name, EqualitySupportValue equalitySupport, List typeArgs) - : base(tok, name, equalitySupport) - { + : base(tok, name, equalitySupport) { Contract.Requires(tok != null); Contract.Requires(name != null); Contract.Requires(typeArgs != null); @@ -1507,11 +1604,6 @@ set { Contract.Requires(Parent == null); // set it only once Contract.Requires(value != null); - // BUGBUG: The following line is a workaround to tell the verifier that 'value' is not of an Immutable type. - // A proper solution would be to be able to express that in the program (in a specification or attribute) or - // to be able to declare 'parent' as [PeerOrImmutable]. 
- Contract.Requires(value is TopLevelDecl || value is Function || value is Method || value is DatatypeCtor || value is QuantifierExpr); - //modifies parent; parent = value; } } @@ -1531,8 +1623,8 @@ } public int PositionalIndex; // which type parameter this is (ie. in C, S is 0, T is 1 and U is 2). - public TypeParameter(IToken tok, string name, EqualitySupportValue equalitySupport = EqualitySupportValue.Unspecified) - : base(tok, name, null) { + public TypeParameter(IToken tok, string name, EqualitySupportValue equalitySupport = EqualitySupportValue.Unspecified, Declaration clonedFrom = null) + : base(tok, name, null, clonedFrom) { Contract.Requires(tok != null); Contract.Requires(name != null); EqualitySupport = equalitySupport; @@ -1570,6 +1662,8 @@ public class LiteralModuleDecl : ModuleDecl { public readonly ModuleDefinition ModuleDef; + public ModuleSignature DefaultExport; // the default export of the module. fill in by the resolver. + public LiteralModuleDecl(ModuleDefinition module, ModuleDefinition parent) : base(module.tok, module.Name, parent, false) { ModuleDef = module; @@ -1588,6 +1682,7 @@ } public override object Dereference() { return Signature.ModuleDef; } } + // Represents "module name as path [ = compilePath];", where name is a identifier and path is a possibly qualified name. public class ModuleFacadeDecl : ModuleDecl { @@ -1606,8 +1701,41 @@ public override object Dereference() { return this; } } - public class ModuleSignature { + // Represents the exports of a module. 
+ public class ModuleExportDecl : ModuleDecl + { + public bool IsDefault; + public List Exports; // list of TopLevelDecl that are included in the export + public List Extends; // list of exports that are extended + public readonly List ExtendDecls = new List(); // fill in by the resolver + + public ModuleExportDecl(IToken tok, ModuleDefinition parent, bool isDefault, + List exports, List extends) + : base(tok, tok.val, parent, false) { + IsDefault = isDefault; + Exports = exports; + Extends = extends; + } + + public override object Dereference() { return this; } + } + public class ExportSignature + { + public bool IncludeBody; + public IToken Id; + public string Name; + public Declaration Decl; // fill in by the resolver + + public ExportSignature(IToken id, bool includeBody) { + Id = id; + Name = id.val; + IncludeBody = includeBody; + } + } + + public class ModuleSignature { + private ModuleDefinition exclusiveRefinement = null; public readonly Dictionary TopLevels = new Dictionary(); public readonly Dictionary> Ctors = new Dictionary>(); public readonly Dictionary StaticMembers = new Dictionary(); @@ -1615,7 +1743,7 @@ // it is abstract). Otherwise, it points to that definition. public ModuleSignature CompileSignature = null; // This is the version of the signature that should be used at compile time. public ModuleSignature Refines = null; - public bool IsGhost = false; + public bool IsAbstract = false; public ModuleSignature() {} public bool FindSubmodule(string name, out ModuleSignature pp) { @@ -1628,6 +1756,25 @@ return false; } } + + public ModuleDefinition ExclusiveRefinement { + get { + if (null == exclusiveRefinement) { + return ModuleDef == null ? 
null : ModuleDef.ExclusiveRefinement; + } else { + return exclusiveRefinement; + } + } + + set { + if (null == ExclusiveRefinement) { + exclusiveRefinement = value; + } else { + throw new InvalidOperationException("An exclusive refinement relationship cannot be amended."); + } + } + } + } public class ModuleDefinition : INamedRegion @@ -1643,22 +1790,79 @@ public readonly Attributes Attributes; public readonly List RefinementBaseName; // null if no refinement base public ModuleDecl RefinementBaseRoot; // filled in early during resolution, corresponds to RefinementBaseName[0] - public ModuleDefinition RefinementBase; // filled in during resolution (null if no refinement base) + public List Includes; public readonly List TopLevelDecls = new List(); // filled in by the parser; readonly after that public readonly Graph CallGraph = new Graph(); // filled in during resolution public int Height; // height in the topological sorting of modules; filled in during resolution public readonly bool IsAbstract; + public readonly bool IsExclusiveRefinement; public readonly bool IsFacade; // True iff this module represents a module facade (that is, an abstract interface) private readonly bool IsBuiltinName; // true if this is something like _System that shouldn't have it's name mangled. + + private ModuleDefinition exclusiveRefinement; + + public ModuleDefinition ExclusiveRefinement { + get { return exclusiveRefinement; } + set { + if (null == exclusiveRefinement) { + if (!value.IsExclusiveRefinement) { + throw new ArgumentException( + string.Format("Exclusive refinement of {0} with 'new' module {1} is disallowed.", + Name, + value.Name)); + } + // todo: validate state of `value`. 
+ exclusiveRefinement = value; + } else { + throw new InvalidOperationException(string.Format("Exclusive refinement of {0} has already been established {1}; cannot reestablish as {2}.", Name, exclusiveRefinement.Name, value.Name)); + } + } + } + + public int ExclusiveRefinementCount { get; set; } + + private ModuleSignature refinementBaseSig; // module signature of the refinementBase. + public ModuleSignature RefinementBaseSig { + get { + return refinementBaseSig; + } + + set { + // the refinementBase member may only be changed once. + if (null != refinementBaseSig) { + throw new InvalidOperationException(string.Format("This module ({0}) already has a refinement base ({1}).", Name, refinementBase.Name)); + } + refinementBaseSig = value; + } + } + + private ModuleDefinition refinementBase; // filled in during resolution via RefinementBase property (null if no refinement base yet or at all). + + public ModuleDefinition RefinementBase { + get { + return refinementBase; + } + + set { + // the refinementBase member may only be changed once. 
+ if (null != refinementBase) { + throw new InvalidOperationException(string.Format("This module ({0}) already has a refinement base ({1}).", Name, refinementBase.Name)); + } + refinementBase = value; + } + } + + public ModuleDefinition ClonedFrom { get; set; } + [ContractInvariantMethod] void ObjectInvariant() { Contract.Invariant(cce.NonNullElements(TopLevelDecls)); Contract.Invariant(CallGraph != null); } - public ModuleDefinition(IToken tok, string name, bool isAbstract, bool isFacade, List refinementBase, ModuleDefinition parent, Attributes attributes, bool isBuiltinName) + public ModuleDefinition(IToken tok, string name, bool isAbstract, bool isFacade, bool isExclusiveRefinement, List refinementBase, ModuleDefinition parent, Attributes attributes, bool isBuiltinName, Parser parser = null) { Contract.Requires(tok != null); Contract.Requires(name != null); @@ -1669,10 +1873,19 @@ RefinementBaseName = refinementBase; IsAbstract = isAbstract; IsFacade = isFacade; + IsExclusiveRefinement = isExclusiveRefinement; RefinementBaseRoot = null; - RefinementBase = null; + this.refinementBase = null; Includes = new List(); IsBuiltinName = isBuiltinName; + + if (isExclusiveRefinement && !DafnyOptions.O.IronDafny) { + parser.errors.SynErr( + tok.filename, + tok.line, + tok.col, + "The exclusively keyword is experimental and only available when IronDafny features are enabled (/ironDafny)."); + } } public virtual bool IsDefaultModule { get { @@ -1683,15 +1896,34 @@ public string CompileName { get { if (compileName == null) { - if (IsBuiltinName) - compileName = Name; - else - compileName = "_" + Height.ToString() + "_" + NonglobalVariable.CompilerizeName(Name); + object externValue = ""; + string errorMessage = ""; + bool isExternal = Attributes.ContainsMatchingValue(this.Attributes, "extern", ref externValue, + new Attributes.MatchingValueOption[] { Attributes.MatchingValueOption.String }, + err => errorMessage = err); + if (isExternal) { + compileName = (string)externValue; 
+ } else { + if (IsBuiltinName) + compileName = Name; + else + compileName = "_" + Height.ToString() + "_" + NonglobalVariable.CompilerizeName(Name); + } } return compileName; } } + public string RefinementCompileName { + get { + if (ExclusiveRefinement != null) { + return this.ExclusiveRefinement.RefinementCompileName; + } else { + return this.CompileName; + } + } + } + /// /// Determines if "a" and "b" are in the same strongly connected component of the call graph, that is, /// if "a" and "b" are mutually recursive. @@ -1809,7 +2041,9 @@ } public class DefaultModuleDecl : ModuleDefinition { - public DefaultModuleDecl() : base(Token.NoToken, "_module", false, false, null, null, null, true) { + public DefaultModuleDecl() + : base(Token.NoToken, "_module", false, false, /*isExclusiveRefinement:*/ false, null, null, null, true) + { } public override bool IsDefaultModule { get { @@ -1827,13 +2061,14 @@ Contract.Invariant(cce.NonNullElements(TypeArgs)); } - public TopLevelDecl(IToken tok, string name, ModuleDefinition module, List typeArgs, Attributes attributes) - : base(tok, name, attributes) { + public TopLevelDecl(IToken tok, string name, ModuleDefinition module, List typeArgs, Attributes attributes, Declaration clonedFrom = null) + : base(tok, name, attributes, clonedFrom) { Contract.Requires(tok != null); Contract.Requires(name != null); Contract.Requires(cce.NonNullElements(typeArgs)); Module = module; TypeArgs = typeArgs; + ExclusiveRefinement = null; } public string FullName { @@ -1846,6 +2081,13 @@ return Module.CompileName + "." + CompileName; } } + + public string FullSanitizedRefinementName { + get { + return Module.RefinementCompileName + "." 
+ CompileName; + } + } + public string FullNameInContext(ModuleDefinition context) { if (Module == context) { return Name; @@ -1855,9 +2097,14 @@ } public string FullCompileName { get { - return Module.CompileName + ".@" + CompileName; + if (!Module.IsDefaultModule) { + return Module.CompileName + ".@" + CompileName; + } else { + return CompileName; + } } } + public TopLevelDecl ExclusiveRefinement { get; set; } } public class TraitDecl : ClassDecl @@ -1865,8 +2112,8 @@ public override string WhatKind { get { return "trait"; } } public bool IsParent { set; get; } public TraitDecl(IToken tok, string name, ModuleDefinition module, - List typeArgs, [Captured] List members, Attributes attributes) - : base(tok, name, module, typeArgs, members, attributes, null) { } + List typeArgs, [Captured] List members, Attributes attributes, TraitDecl clonedFrom = null) + : base(tok, name, module, typeArgs, members, attributes, null, clonedFrom) { } } public class ClassDecl : TopLevelDecl { @@ -1884,8 +2131,8 @@ } public ClassDecl(IToken tok, string name, ModuleDefinition module, - List typeArgs, [Captured] List members, Attributes attributes, List traits) - : base(tok, name, module, typeArgs, attributes) { + List typeArgs, [Captured] List members, Attributes attributes, List traits, ClassDecl clonedFrom = null) + : base(tok, name, module, typeArgs, attributes, clonedFrom) { Contract.Requires(tok != null); Contract.Requires(name != null); Contract.Requires(module != null); @@ -1899,11 +2146,17 @@ return false; } } + + public new ClassDecl ClonedFrom { + get { + return (ClassDecl)base.ClonedFrom; + } + } } public class DefaultClassDecl : ClassDecl { - public DefaultClassDecl(ModuleDefinition module, [Captured] List members) - : base(Token.NoToken, "_default", module, new List(), members, null, null) { + public DefaultClassDecl(ModuleDefinition module, [Captured] List members, DefaultClassDecl clonedFrom = null) + : base(Token.NoToken, "_default", module, new List(), members, null, 
null, clonedFrom) { Contract.Requires(module != null); Contract.Requires(cce.NonNullElements(members)); } @@ -1936,9 +2189,9 @@ public readonly Function Requires; public readonly Function Reads; - public ArrowTypeDecl(List tps, Function req, Function reads, ModuleDefinition module, Attributes attributes) + public ArrowTypeDecl(List tps, Function req, Function reads, ModuleDefinition module, Attributes attributes, ArrowTypeDecl clonedFrom) : base(Token.NoToken, ArrowType.ArrowTypeName(tps.Count - 1), module, tps, - new List { req, reads }, attributes, null) { + new List { req, reads }, attributes, null, clonedFrom) { Contract.Requires(tps != null && 1 <= tps.Count); Contract.Requires(req != null); Contract.Requires(reads != null); @@ -1961,8 +2214,8 @@ } public DatatypeDecl(IToken tok, string name, ModuleDefinition module, List typeArgs, - [Captured] List ctors, Attributes attributes) - : base(tok, name, module, typeArgs, attributes) { + [Captured] List ctors, Attributes attributes, DatatypeDecl clonedFrom = null) + : base(tok, name, module, typeArgs, attributes, clonedFrom) { Contract.Requires(tok != null); Contract.Requires(name != null); Contract.Requires(module != null); @@ -1976,6 +2229,12 @@ return (TypeArgs.Count == 0 && Ctors.TrueForAll(ctr => ctr.Formals.Count == 0)); } } + + public new DatatypeDecl ClonedFrom { + get { + return (DatatypeDecl)base.ClonedFrom; + } + } } public class IndDatatypeDecl : DatatypeDecl @@ -1988,8 +2247,8 @@ public ES EqualitySupport = ES.NotYetComputed; public IndDatatypeDecl(IToken tok, string name, ModuleDefinition module, List typeArgs, - [Captured] List ctors, Attributes attributes) - : base(tok, name, module, typeArgs, ctors, attributes) { + [Captured] List ctors, Attributes attributes, IndDatatypeDecl clonedFrom = null) + : base(tok, name, module, typeArgs, ctors, attributes, clonedFrom) { Contract.Requires(tok != null); Contract.Requires(name != null); Contract.Requires(module != null); @@ -1997,6 +2256,12 @@ 
Contract.Requires(cce.NonNullElements(ctors)); Contract.Requires(1 <= ctors.Count); } + + public new IndDatatypeDecl ClonedFrom { + get { + return (IndDatatypeDecl)base.ClonedFrom; + } + } } public class TupleTypeDecl : IndDatatypeDecl @@ -2052,8 +2317,8 @@ public CoDatatypeDecl SscRepr; // filled in during resolution public CoDatatypeDecl(IToken tok, string name, ModuleDefinition module, List typeArgs, - [Captured] List ctors, Attributes attributes) - : base(tok, name, module, typeArgs, ctors, attributes) { + [Captured] List ctors, Attributes attributes, CoDatatypeDecl clonedFrom = null) + : base(tok, name, module, typeArgs, ctors, attributes, clonedFrom) { Contract.Requires(tok != null); Contract.Requires(name != null); Contract.Requires(module != null); @@ -2080,8 +2345,8 @@ public SpecialField QueryField; // filled in during resolution public List Destructors = new List(); // contents filled in during resolution; includes both implicit (not mentionable in source) and explicit destructors - public DatatypeCtor(IToken tok, string name, [Captured] List formals, Attributes attributes) - : base(tok, name, attributes) { + public DatatypeCtor(IToken tok, string name, [Captured] List formals, Attributes attributes, DatatypeCtor clonedFrom = null) + : base(tok, name, attributes, clonedFrom) { Contract.Requires(tok != null); Contract.Requires(name != null); Contract.Requires(cce.NonNullElements(formals)); @@ -2110,6 +2375,7 @@ bool MustReverify { get; } string FullSanitizedName { get; } bool AllowsNontermination { get; } + bool ContainsQuantifier { get; set; } } /// /// An ICallable is a Function, Method, IteratorDecl, or RedirectingTypeDecl. 
@@ -2117,6 +2383,7 @@ public interface ICallable : ICodeContext { IToken Tok { get; } + string WhatKind { get; } string NameRelativeToModule { get; } Specification Decreases { get; } /// @@ -2126,8 +2393,10 @@ /// bool InferredDecreases { get; set; } } + public class DontUseICallable : ICallable { + public string WhatKind { get { throw new cce.UnreachableException(); } } public bool IsGhost { get { throw new cce.UnreachableException(); } } public List TypeArgs { get { throw new cce.UnreachableException(); } } public List Ins { get { throw new cce.UnreachableException(); } } @@ -2142,6 +2411,10 @@ get { throw new cce.UnreachableException(); } set { throw new cce.UnreachableException(); } } + public bool ContainsQuantifier { + get { throw new cce.UnreachableException(); } + set { throw new cce.UnreachableException(); } + } } /// /// An IMethodCodeContext is a Method or IteratorDecl. @@ -2170,6 +2443,11 @@ bool ICodeContext.MustReverify { get { Contract.Assume(false, "should not be called on NoContext"); throw new cce.UnreachableException(); } } public string FullSanitizedName { get { Contract.Assume(false, "should not be called on NoContext"); throw new cce.UnreachableException(); } } public bool AllowsNontermination { get { Contract.Assume(false, "should not be called on NoContext"); throw new cce.UnreachableException(); } } + public bool ContainsQuantifier { + get { Contract.Assume(false, "should not be called on NoContext"); throw new cce.UnreachableException(); } + set { Contract.Assume(false, "should not be called on NoContext"); throw new cce.UnreachableException(); } + } + } public class IteratorDecl : ClassDecl, IMethodCodeContext @@ -2197,6 +2475,8 @@ public Predicate Member_Valid; // created during registration phase of resolution; its specification is filled in during resolution public Method Member_MoveNext; // created during registration phase of resolution; its specification is filled in during resolution public readonly LocalVariable 
YieldCountVariable; + bool containsQuantifier; + public IteratorDecl(IToken tok, string name, ModuleDefinition module, List typeArgs, List ins, List outs, Specification reads, Specification mod, Specification decreases, @@ -2241,6 +2521,48 @@ } /// + /// Returns the non-null expressions of this declaration proper (that is, do not include the expressions of substatements). + /// Does not include the generated class members. + /// + public virtual IEnumerable SubExpressions { + get { + foreach (var e in Attributes.SubExpressions(Attributes)) { + yield return e; + } + foreach (var e in Attributes.SubExpressions(Reads.Attributes)) { + yield return e; + } + foreach (var e in Reads.Expressions) { + yield return e.E; + } + foreach (var e in Attributes.SubExpressions(Modifies.Attributes)) { + yield return e; + } + foreach (var e in Modifies.Expressions) { + yield return e.E; + } + foreach (var e in Attributes.SubExpressions(Decreases.Attributes)) { + yield return e; + } + foreach (var e in Decreases.Expressions) { + yield return e; + } + foreach (var e in Requires) { + yield return e.E; + } + foreach (var e in Ensures) { + yield return e.E; + } + foreach (var e in YieldRequires) { + yield return e.E; + } + foreach (var e in YieldEnsures) { + yield return e.E; + } + } + } + + /// /// This Dafny type exists only for the purpose of giving the yield-count variable a type, so /// that the type can be recognized during translation of Dafny into Boogie. 
It represents /// an integer component in a "decreases" clause whose order is (\lambda x,y :: x GREATER y), @@ -2270,6 +2592,10 @@ set { _inferredDecr = value; } get { return _inferredDecr; } } + bool ICodeContext.ContainsQuantifier { + set { containsQuantifier = value; } + get { return containsQuantifier; } + } ModuleDefinition ICodeContext.EnclosingModule { get { return this.Module; } } bool ICodeContext.MustReverify { get { return false; } } public bool AllowsNontermination { @@ -2290,8 +2616,8 @@ public readonly bool IsGhost; public TopLevelDecl EnclosingClass; // filled in during resolution public MemberDecl RefinementBase; // filled in during the pre-resolution refinement transformation; null if the member is new here - public MemberDecl(IToken tok, string name, bool hasStaticKeyword, bool isGhost, Attributes attributes) - : base(tok, name, attributes) { + public MemberDecl(IToken tok, string name, bool hasStaticKeyword, bool isGhost, Attributes attributes, Declaration clonedFrom = null) + : base(tok, name, attributes, clonedFrom) { Contract.Requires(tok != null); Contract.Requires(name != null); HasStaticKeyword = hasStaticKeyword; @@ -2316,6 +2642,14 @@ return EnclosingClass.FullSanitizedName + "." + CompileName; } } + public string FullSanitizedRefinementName { + get { + Contract.Requires(EnclosingClass != null); + Contract.Ensures(Contract.Result() != null); + + return EnclosingClass.FullSanitizedRefinementName + "." 
+ CompileName; + } + } public string FullNameInContext(ModuleDefinition context) { Contract.Requires(EnclosingClass != null); Contract.Ensures(Contract.Result() != null); @@ -2339,6 +2673,11 @@ return EnclosingClass.FullCompileName + ".@" + CompileName; } } + public virtual IEnumerable SubExpressions { + get { + yield break; + } + } } public class Field : MemberDecl { @@ -2428,8 +2767,8 @@ Contract.Invariant(TheType != null && Name == TheType.Name); } - public OpaqueTypeDecl(IToken tok, string name, ModuleDefinition module, TypeParameter.EqualitySupportValue equalitySupport, List typeArgs, Attributes attributes) - : base(tok, name, module, typeArgs, attributes) { + public OpaqueTypeDecl(IToken tok, string name, ModuleDefinition module, TypeParameter.EqualitySupportValue equalitySupport, List typeArgs, Attributes attributes, Declaration clonedFrom = null) + : base(tok, name, module, typeArgs, attributes, clonedFrom) { Contract.Requires(tok != null); Contract.Requires(name != null); Contract.Requires(module != null); @@ -2470,16 +2809,16 @@ public readonly BoundVar Var; // can be null (if non-null, then object.ReferenceEquals(Var.Type, BaseType)) public readonly Expression Constraint; // is null iff Var is public NativeType NativeType; // non-null for fixed-size representations (otherwise, use BigIntegers for integers) - public NewtypeDecl(IToken tok, string name, ModuleDefinition module, Type baseType, Attributes attributes) - : base(tok, name, module, new List(), attributes) { + public NewtypeDecl(IToken tok, string name, ModuleDefinition module, Type baseType, Attributes attributes, NewtypeDecl clonedFrom = null) + : base(tok, name, module, new List(), attributes, clonedFrom) { Contract.Requires(tok != null); Contract.Requires(name != null); Contract.Requires(module != null); Contract.Requires(baseType != null); BaseType = baseType; } - public NewtypeDecl(IToken tok, string name, ModuleDefinition module, BoundVar bv, Expression constraint, Attributes attributes) - 
: base(tok, name, module, new List(), attributes) { + public NewtypeDecl(IToken tok, string name, ModuleDefinition module, BoundVar bv, Expression constraint, Attributes attributes, NewtypeDecl clonedFrom = null) + : base(tok, name, module, new List(), attributes, clonedFrom) { Contract.Requires(tok != null); Contract.Requires(name != null); Contract.Requires(module != null); @@ -2510,14 +2849,23 @@ get { throw new cce.UnreachableException(); } // see comment above about ICallable.Decreases set { throw new cce.UnreachableException(); } // see comment above about ICallable.Decreases } + bool ICodeContext.ContainsQuantifier { + get { throw new cce.UnreachableException(); } + set { throw new cce.UnreachableException(); } + } + public new NewtypeDecl ClonedFrom { + get { + return (NewtypeDecl)base.ClonedFrom; + } + } } public class TypeSynonymDecl : TopLevelDecl, RedirectingTypeDecl { public override string WhatKind { get { return "type synonym"; } } public readonly Type Rhs; - public TypeSynonymDecl(IToken tok, string name, List typeArgs, ModuleDefinition module, Type rhs, Attributes attributes) - : base(tok, name, module, typeArgs, attributes) { + public TypeSynonymDecl(IToken tok, string name, List typeArgs, ModuleDefinition module, Type rhs, Attributes attributes, TypeSynonymDecl clonedFrom = null) + : base(tok, name, module, typeArgs, attributes, clonedFrom) { Contract.Requires(tok != null); Contract.Requires(name != null); Contract.Requires(typeArgs != null); @@ -2562,6 +2910,10 @@ get { throw new cce.UnreachableException(); } // see comment above about ICallable.Decreases set { throw new cce.UnreachableException(); } // see comment above about ICallable.Decreases } + bool ICodeContext.ContainsQuantifier { + get { throw new cce.UnreachableException(); } + set { throw new cce.UnreachableException(); } + } } [ContractClass(typeof(IVariableContracts))] @@ -2823,19 +3175,7 @@ } } - /// - /// A "ThisSurrogate" is used during translation time to make the treatment of 
the receiver more similar to - /// the treatment of other in-parameters. - /// - public class ThisSurrogate : Formal - { - public ThisSurrogate(IToken tok, Type type) - : base(tok, "this", type, true, false) { - Contract.Requires(tok != null); - Contract.Requires(type != null); - } - } - + [DebuggerDisplay("Bound<{name}>")] public class BoundVar : NonglobalVariable { public override bool IsMutable { get { @@ -2855,6 +3195,7 @@ public override string WhatKind { get { return "function"; } } public readonly bool IsProtected; public bool IsRecursive; // filled in during resolution + public bool IsFueled; // filled in during resolution if anyone tries to adjust this function's fuel public readonly List TypeArgs; public readonly List Formals; public readonly Type ResultType; @@ -2867,6 +3208,31 @@ public readonly IToken SignatureEllipsis; public bool IsBuiltin; public Function OverriddenFunction; + public bool containsQuantifier; + public bool ContainsQuantifier { + set { containsQuantifier = value; } + get { return containsQuantifier; } + } + + public override IEnumerable SubExpressions { + get { + foreach (var e in Req) { + yield return e; + } + foreach (var e in Reads) { + yield return e.E; + } + foreach (var e in Ens) { + yield return e; + } + foreach (var e in Decreases.Expressions) { + yield return e; + } + if (Body != null) { + yield return Body; + } + } + } public Type Type { get { @@ -2880,7 +3246,7 @@ return Contract.Exists(Decreases.Expressions, e => e is WildcardExpr); } } - + /// /// The "AllCalls" field is used for non-FixpointPredicate, non-PrefixPredicate functions only (so its value should not be relied upon for FixpointPredicate and PrefixPredicate functions). /// It records all function calls made by the Function, including calls made in the body as well as in the specification. 
@@ -2912,8 +3278,8 @@ public Function(IToken tok, string name, bool hasStaticKeyword, bool isProtected, bool isGhost, List typeArgs, List formals, Type resultType, List req, List reads, List ens, Specification decreases, - Expression body, Attributes attributes, IToken signatureEllipsis) - : base(tok, name, hasStaticKeyword, isGhost, attributes) { + Expression body, Attributes attributes, IToken signatureEllipsis, Declaration clonedFrom = null) + : base(tok, name, hasStaticKeyword, isGhost, attributes, clonedFrom) { Contract.Requires(tok != null); Contract.Requires(name != null); @@ -2925,6 +3291,7 @@ Contract.Requires(cce.NonNullElements(ens)); Contract.Requires(decreases != null); this.IsProtected = isProtected; + this.IsFueled = false; // Defaults to false. Only set to true if someone mentions this function in a fuel annotation this.TypeArgs = typeArgs; this.Formals = formals; this.ResultType = resultType; @@ -2934,7 +3301,27 @@ this.Decreases = decreases; this.Body = body; this.SignatureEllipsis = signatureEllipsis; + + if (attributes != null) { + List args = Attributes.FindExpressions(attributes, "fuel"); + if (args != null) { + if (args.Count == 1) { + LiteralExpr literal = args[0] as LiteralExpr; + if (literal != null && literal.Value is BigInteger) { + this.IsFueled = true; + } + } else if (args.Count == 2) { + LiteralExpr literalLow = args[0] as LiteralExpr; + LiteralExpr literalHigh = args[1] as LiteralExpr; + + if (literalLow != null && literalLow.Value is BigInteger && literalHigh != null && literalHigh.Value is BigInteger) { + this.IsFueled = true; + } + } + } + } } + bool ICodeContext.IsGhost { get { return this.IsGhost; } } List ICodeContext.TypeArgs { get { return this.TypeArgs; } } @@ -2949,6 +3336,9 @@ } ModuleDefinition ICodeContext.EnclosingModule { get { return this.EnclosingClass.Module; } } bool ICodeContext.MustReverify { get { return false; } } + + [Pure] + public bool IsFuelAware() { return IsRecursive || IsFueled; } } public class 
Predicate : Function @@ -2964,8 +3354,8 @@ public Predicate(IToken tok, string name, bool hasStaticKeyword, bool isProtected, bool isGhost, List typeArgs, List formals, List req, List reads, List ens, Specification decreases, - Expression body, BodyOriginKind bodyOrigin, Attributes attributes, IToken signatureEllipsis) - : base(tok, name, hasStaticKeyword, isProtected, isGhost, typeArgs, formals, new BoolType(), req, reads, ens, decreases, body, attributes, signatureEllipsis) { + Expression body, BodyOriginKind bodyOrigin, Attributes attributes, IToken signatureEllipsis, Declaration clonedFrom = null) + : base(tok, name, hasStaticKeyword, isProtected, isGhost, typeArgs, formals, new BoolType(), req, reads, ens, decreases, body, attributes, signatureEllipsis, clonedFrom) { Contract.Requires(bodyOrigin == Predicate.BodyOriginKind.OriginalOrInherited || body != null); BodyOrigin = bodyOrigin; } @@ -2983,7 +3373,7 @@ List typeArgs, Formal k, List formals, List req, List reads, List ens, Specification decreases, Expression body, Attributes attributes, FixpointPredicate fixpointPred) - : base(tok, name, hasStaticKeyword, isProtected, true, typeArgs, formals, new BoolType(), req, reads, ens, decreases, body, attributes, null) { + : base(tok, name, hasStaticKeyword, isProtected, true, typeArgs, formals, new BoolType(), req, reads, ens, decreases, body, attributes, null, null) { Contract.Requires(k != null); Contract.Requires(fixpointPred != null); Contract.Requires(formals != null && 1 <= formals.Count && formals[0] == k); @@ -3000,9 +3390,9 @@ public FixpointPredicate(IToken tok, string name, bool hasStaticKeyword, bool isProtected, List typeArgs, List formals, List req, List reads, List ens, - Expression body, Attributes attributes, IToken signatureEllipsis) + Expression body, Attributes attributes, IToken signatureEllipsis, Declaration clonedFrom = null) : base(tok, name, hasStaticKeyword, isProtected, true, typeArgs, formals, new BoolType(), - req, reads, ens, new 
Specification(new List(), null), body, attributes, signatureEllipsis) { + req, reads, ens, new Specification(new List(), null), body, attributes, signatureEllipsis, clonedFrom) { } /// @@ -3023,7 +3413,7 @@ prefixPredCall.TypeArgumentSubstitutions = new Dictionary(); var old_to_new = new Dictionary(); for (int i = 0; i < this.TypeArgs.Count; i++) { - old_to_new[this.TypeArgs[i]] = this.PrefixPredicate.TypeArgs[i]; + old_to_new[this.TypeArgs[i]] = this.PrefixPredicate.TypeArgs[i]; } foreach (var p in fexp.TypeArgumentSubstitutions) { prefixPredCall.TypeArgumentSubstitutions[old_to_new[p.Key]] = p.Value; @@ -3041,9 +3431,9 @@ public InductivePredicate(IToken tok, string name, bool hasStaticKeyword, bool isProtected, List typeArgs, List formals, List req, List reads, List ens, - Expression body, Attributes attributes, IToken signatureEllipsis) + Expression body, Attributes attributes, IToken signatureEllipsis, Declaration clonedFrom = null) : base(tok, name, hasStaticKeyword, isProtected, typeArgs, formals, - req, reads, ens, body, attributes, signatureEllipsis) { + req, reads, ens, body, attributes, signatureEllipsis, clonedFrom) { } } @@ -3053,9 +3443,9 @@ public CoPredicate(IToken tok, string name, bool hasStaticKeyword, bool isProtected, List typeArgs, List formals, List req, List reads, List ens, - Expression body, Attributes attributes, IToken signatureEllipsis) + Expression body, Attributes attributes, IToken signatureEllipsis, Declaration clonedFrom = null) : base(tok, name, hasStaticKeyword, isProtected, typeArgs, formals, - req, reads, ens, body, attributes, signatureEllipsis) { + req, reads, ens, body, attributes, signatureEllipsis, clonedFrom) { } } @@ -3077,6 +3467,25 @@ public bool IsTailRecursive; // filled in during resolution public readonly ISet AssignedAssumptionVariables = new HashSet(); public Method OverriddenMethod; + public bool containsQuantifier; + + public override IEnumerable SubExpressions { + get { + foreach (var e in Req) { + yield 
return e.E; + } + foreach (var e in Mod.Expressions) { + yield return e.E; + } + foreach (var e in Ens) { + yield return e.E; + } + foreach (var e in Decreases.Expressions) { + yield return e; + } + } + } + [ContractInvariantMethod] void ObjectInvariant() { @@ -3097,8 +3506,8 @@ [Captured] List ens, [Captured] Specification decreases, [Captured] BlockStmt body, - Attributes attributes, IToken signatureEllipsis) - : base(tok, name, hasStaticKeyword, isGhost, attributes) { + Attributes attributes, IToken signatureEllipsis, Declaration clonedFrom = null) + : base(tok, name, hasStaticKeyword, isGhost, attributes, clonedFrom) { Contract.Requires(tok != null); Contract.Requires(name != null); Contract.Requires(cce.NonNullElements(typeArgs)); @@ -3133,6 +3542,11 @@ set { _inferredDecr = value; } get { return _inferredDecr; } } + bool ICodeContext.ContainsQuantifier { + set { containsQuantifier = value; } + get { return containsQuantifier; } + } + ModuleDefinition ICodeContext.EnclosingModule { get { Contract.Assert(this.EnclosingClass != null); // this getter is supposed to be called only after signature-resolution is complete @@ -3170,8 +3584,8 @@ [Captured] List ens, [Captured] Specification decreases, [Captured] BlockStmt body, - Attributes attributes, IToken signatureEllipsis) - : base(tok, name, hasStaticKeyword, true, typeArgs, ins, outs, req, mod, ens, decreases, body, attributes, signatureEllipsis) { + Attributes attributes, IToken signatureEllipsis, Declaration clonedFrom = null) + : base(tok, name, hasStaticKeyword, true, typeArgs, ins, outs, req, mod, ens, decreases, body, attributes, signatureEllipsis, clonedFrom) { } } @@ -3185,8 +3599,8 @@ [Captured] List ens, [Captured] Specification decreases, [Captured] BlockStmt body, - Attributes attributes, IToken signatureEllipsis) - : base(tok, name, false, false, typeArgs, ins, new List(), req, mod, ens, decreases, body, attributes, signatureEllipsis) { + Attributes attributes, IToken signatureEllipsis, Declaration 
clonedFrom = null) + : base(tok, name, false, false, typeArgs, ins, new List(), req, mod, ens, decreases, body, attributes, signatureEllipsis, clonedFrom) { Contract.Requires(tok != null); Contract.Requires(name != null); Contract.Requires(cce.NonNullElements(typeArgs)); @@ -3237,8 +3651,8 @@ List ens, Specification decreases, BlockStmt body, - Attributes attributes, IToken signatureEllipsis) - : base(tok, name, hasStaticKeyword, true, typeArgs, ins, outs, req, mod, ens, decreases, body, attributes, signatureEllipsis) { + Attributes attributes, IToken signatureEllipsis, Declaration clonedFrom) + : base(tok, name, hasStaticKeyword, true, typeArgs, ins, outs, req, mod, ens, decreases, body, attributes, signatureEllipsis, clonedFrom) { Contract.Requires(tok != null); Contract.Requires(name != null); Contract.Requires(cce.NonNullElements(typeArgs)); @@ -3263,8 +3677,8 @@ List ens, Specification decreases, BlockStmt body, - Attributes attributes, IToken signatureEllipsis) - : base(tok, name, hasStaticKeyword, typeArgs, ins, outs, req, mod, ens, decreases, body, attributes, signatureEllipsis) { + Attributes attributes, IToken signatureEllipsis, Declaration clonedFrom = null) + : base(tok, name, hasStaticKeyword, typeArgs, ins, outs, req, mod, ens, decreases, body, attributes, signatureEllipsis, clonedFrom) { Contract.Requires(tok != null); Contract.Requires(name != null); Contract.Requires(cce.NonNullElements(typeArgs)); @@ -3289,8 +3703,8 @@ List ens, Specification decreases, BlockStmt body, - Attributes attributes, IToken signatureEllipsis) - : base(tok, name, hasStaticKeyword, typeArgs, ins, outs, req, mod, ens, decreases, body, attributes, signatureEllipsis) { + Attributes attributes, IToken signatureEllipsis, Declaration clonedFrom = null) + : base(tok, name, hasStaticKeyword, typeArgs, ins, outs, req, mod, ens, decreases, body, attributes, signatureEllipsis, clonedFrom) { Contract.Requires(tok != null); Contract.Requires(name != null); 
Contract.Requires(cce.NonNullElements(typeArgs)); @@ -3614,7 +4028,7 @@ Contract.Invariant(Expr != null); } - public ExprRhs(Expression expr, Attributes attrs = null) + public ExprRhs(Expression expr, Attributes attrs = null) // TODO: these 'attrs' apparently aren't handled correctly in the Cloner, and perhaps not in various visitors either (for example, CheckIsCompilable should not go into attributes) : base(expr.tok, attrs) { Contract.Requires(expr != null); @@ -3775,6 +4189,28 @@ } } + public class LetStmt : Statement + { + public readonly List LHSs; + public readonly List RHSs; + + public LetStmt(IToken tok, IToken endTok, List lhss, List rhss) + : base(tok, endTok) { + LHSs = lhss; + RHSs = rhss; + } + + public IEnumerable BoundVars { + get { + foreach (var lhs in LHSs) { + foreach (var bv in lhs.Vars) { + yield return bv; + } + } + } + } + } + /// /// Common superclass of UpdateStmt and AssignSuchThatStmt. /// @@ -3804,6 +4240,9 @@ public override bool IsFinite { get { return false; } } + public override int Preference() { + return 0; + } } /// @@ -3914,17 +4353,41 @@ /// public static bool LhsIsToGhost(Expression lhs) { Contract.Requires(lhs != null); + return LhsIsToGhost_Which(lhs) == NonGhostKind.IsGhost; + } + public enum NonGhostKind { IsGhost, Variable, Field, ArrayElement } + public static string NonGhostKind_To_String(NonGhostKind gk) { + Contract.Requires(gk != NonGhostKind.IsGhost); + switch (gk) { + case NonGhostKind.Variable: return "non-ghost variable"; + case NonGhostKind.Field: return "non-ghost field"; + case NonGhostKind.ArrayElement: return "array element"; + default: + Contract.Assume(false); // unexpected NonGhostKind + throw new cce.UnreachableException(); // please compiler + } + } + /// + /// This method assumes "lhs" has been successfully resolved. 
+ /// + public static NonGhostKind LhsIsToGhost_Which(Expression lhs) { + Contract.Requires(lhs != null); lhs = lhs.Resolved; if (lhs is IdentifierExpr) { var x = (IdentifierExpr)lhs; - return x.Var.IsGhost; + if (!x.Var.IsGhost) { + return NonGhostKind.Variable; + } } else if (lhs is MemberSelectExpr) { var x = (MemberSelectExpr)lhs; - return x.Member.IsGhost; + if (!x.Member.IsGhost) { + return NonGhostKind.Field; + } } else { // LHS denotes an array element, which is always non-ghost - return false; + return NonGhostKind.ArrayElement; } + return NonGhostKind.IsGhost; } } @@ -4095,20 +4558,24 @@ } public class IfStmt : Statement { + public readonly bool IsExistentialGuard; public readonly Expression Guard; public readonly BlockStmt Thn; public readonly Statement Els; [ContractInvariantMethod] void ObjectInvariant() { + Contract.Invariant(!IsExistentialGuard || (Guard is ExistsExpr && ((ExistsExpr)Guard).Range == null)); Contract.Invariant(Thn != null); Contract.Invariant(Els == null || Els is BlockStmt || Els is IfStmt || Els is SkeletonStatement); } - public IfStmt(IToken tok, IToken endTok, Expression guard, BlockStmt thn, Statement els) + public IfStmt(IToken tok, IToken endTok, bool isExistentialGuard, Expression guard, BlockStmt thn, Statement els) : base(tok, endTok) { Contract.Requires(tok != null); Contract.Requires(endTok != null); + Contract.Requires(!isExistentialGuard || (guard is ExistsExpr && ((ExistsExpr)guard).Range == null)); Contract.Requires(thn != null); Contract.Requires(els == null || els is BlockStmt || els is IfStmt || els is SkeletonStatement); + this.IsExistentialGuard = isExistentialGuard; this.Guard = guard; this.Thn = thn; this.Els = els; @@ -4134,20 +4601,24 @@ public class GuardedAlternative { public readonly IToken Tok; + public readonly bool IsExistentialGuard; public readonly Expression Guard; public readonly List Body; [ContractInvariantMethod] void ObjectInvariant() { Contract.Invariant(Tok != null); Contract.Invariant(Guard != 
null); + Contract.Invariant(!IsExistentialGuard || (Guard is ExistsExpr && ((ExistsExpr)Guard).Range == null)); Contract.Invariant(Body != null); } - public GuardedAlternative(IToken tok, Expression guard, List body) + public GuardedAlternative(IToken tok, bool isExistentialGuard, Expression guard, List body) { Contract.Requires(tok != null); Contract.Requires(guard != null); + Contract.Requires(!isExistentialGuard || (guard is ExistsExpr && ((ExistsExpr)guard).Range == null)); Contract.Requires(body != null); this.Tok = tok; + this.IsExistentialGuard = isExistentialGuard; this.Guard = guard; this.Body = body; } @@ -4327,6 +4798,7 @@ public readonly Expression Range; public readonly List Ens; public readonly Statement Body; + public List ForallExpressions; // fill in by rewriter. public List Bounds; // initialized and filled in by resolver // invariant: if successfully resolved, Bounds.Count == BoundVars.Count; @@ -4541,7 +5013,12 @@ public override Expression StepExpr(Expression line0, Expression line1) { - return new BinaryExpr(line0.tok, Op, line0, line1); + if (Op == BinaryExpr.Opcode.Exp) { + // The order of operands is reversed so that it can be turned into implication during resolution + return new BinaryExpr(line0.tok, Op, line1, line0); + } else { + return new BinaryExpr(line0.tok, Op, line0, line1); + } } public override string ToString() @@ -4621,7 +5098,7 @@ Contract.Invariant(StepOps.Count == Hints.Count); } - public CalcStmt(IToken tok, IToken endTok, CalcOp op, List lines, List hints, List stepOps, CalcOp resultOp) + public CalcStmt(IToken tok, IToken endTok, CalcOp op, List lines, List hints, List stepOps, CalcOp resultOp, Attributes attrs) : base(tok, endTok) { Contract.Requires(tok != null); @@ -4646,6 +5123,7 @@ } this.Steps = new List(); this.Result = null; + this.Attributes = attrs; } public override IEnumerable SubStatements @@ -4660,14 +5138,30 @@ { get { foreach (var e in base.SubExpressions) { yield return e; } - foreach (var l in Lines) { 
- yield return l; + foreach (var e in Attributes.SubExpressions(Attributes)) { yield return e; } + + for (int i = 0; i < Lines.Count - 1; i++) { // note, we skip the duplicated line at the end + yield return Lines[i]; } - foreach (var e in Steps) { - yield return e; + foreach (var calcop in AllCalcOps) { + var o3 = calcop as TernaryCalcOp; + if (o3 != null) { + yield return o3.Index; + } + } + } + } + + IEnumerable AllCalcOps { + get { + if (Op != null) { + yield return Op; } - if (Result != null) { - yield return Result; + foreach (var stepop in StepOps) { + yield return stepop; + } + if (ResultOp != null) { + yield return ResultOp; } } } @@ -4710,8 +5204,8 @@ Contract.Invariant(cce.NonNullElements(MissingCases)); } - public readonly Expression Source; - public readonly List Cases; + private Expression source; + private List cases; public readonly List MissingCases = new List(); // filled in during resolution public readonly bool UsesOptionalBraces; @@ -4721,14 +5215,31 @@ Contract.Requires(endTok != null); Contract.Requires(source != null); Contract.Requires(cce.NonNullElements(cases)); - this.Source = source; - this.Cases = cases; + this.source = source; + this.cases = cases; this.UsesOptionalBraces = usesOptionalBraces; } + public Expression Source { + get { return source; } + } + + public List Cases { + get { return cases; } + } + + // should only be used in desugar in resolve to change the cases of the matchexpr + public void UpdateSource(Expression source) { + this.source = source; + } + + public void UpdateCases(List cases) { + this.cases = cases; + } + public override IEnumerable SubStatements { get { - foreach (var kase in Cases) { + foreach (var kase in cases) { foreach (var s in kase.Body) { yield return s; } @@ -4745,7 +5256,7 @@ public class MatchCaseStmt : MatchCase { - public readonly List Body; + private List body; [ContractInvariantMethod] void ObjectInvariant() { @@ -4759,7 +5270,25 @@ Contract.Requires(id != null); 
Contract.Requires(cce.NonNullElements(arguments)); Contract.Requires(cce.NonNullElements(body)); - this.Body = body; + this.body = body; + } + + public MatchCaseStmt(IToken tok, string id, [Captured] List cps, [Captured] List body) + : base(tok, id, cps) { + Contract.Requires(tok != null); + Contract.Requires(id != null); + Contract.Requires(cce.NonNullElements(cps)); + Contract.Requires(cce.NonNullElements(body)); + this.body = body; + } + + public List Body { + get { return body; } + } + + // should only be called by resolve to reset the body of the MatchCaseExpr + public void UpdateBody(List body) { + this.body = body; } } @@ -4896,7 +5425,7 @@ } // ------------------------------------------------------------------------------------------------------ - + [DebuggerDisplay("{Printer.ExprToString(this)}")] public abstract class Expression { public readonly IToken tok; @@ -4961,6 +5490,10 @@ get { yield break; } } + public virtual bool IsImplicit { + get { return false; } + } + public static IEnumerable Conjuncts(Expression expr) { Contract.Requires(expr != null); Contract.Requires(expr.Type.IsBoolType); @@ -4995,7 +5528,9 @@ public static Expression CreateAdd(Expression e0, Expression e1) { Contract.Requires(e0 != null); Contract.Requires(e1 != null); - Contract.Requires((e0.Type.IsIntegerType && e1.Type.IsIntegerType) || (e0.Type.IsRealType && e1.Type.IsRealType)); + Contract.Requires( + (e0.Type.IsNumericBased(Type.NumericPersuation.Int) && e1.Type.IsNumericBased(Type.NumericPersuation.Int)) || + (e0.Type.IsNumericBased(Type.NumericPersuation.Real) && e1.Type.IsNumericBased(Type.NumericPersuation.Real))); Contract.Ensures(Contract.Result() != null); var s = new BinaryExpr(e0.tok, BinaryExpr.Opcode.Add, e0, e1); s.ResolvedOp = BinaryExpr.ResolvedOpcode.Add; // resolve here @@ -5003,6 +5538,7 @@ return s; } + /// /// Create a resolved expression of the form "CVT(e0) - CVT(e1)", where "CVT" is either "int" (if /// e0.Type is an integer-based numeric type) or "real" 
(if e0.Type is a real-based numeric type). @@ -5016,20 +5552,32 @@ Contract.Ensures(Contract.Result() != null); Type toType = e0.Type.IsNumericBased(Type.NumericPersuation.Int) ? (Type)Type.Int : Type.Real; - e0 = new ConversionExpr(e0.tok, e0, toType); - e0.Type = toType; - e1 = new ConversionExpr(e1.tok, e1, toType); - e1.Type = toType; + e0 = CastIfNeeded(e0, toType); + e1 = CastIfNeeded(e1, toType); return CreateSubtract(e0, e1); } + private static Expression CastIfNeeded(Expression expr, Type toType) { + if (!expr.Type.Equals(toType)) { + var cast = new ConversionExpr(expr.tok, expr, toType); + cast.Type = toType; + return cast; + } else { + return expr; + } + } + /// /// Create a resolved expression of the form "e0 - e1" /// public static Expression CreateSubtract(Expression e0, Expression e1) { Contract.Requires(e0 != null); + Contract.Requires(e0.Type != null); Contract.Requires(e1 != null); - Contract.Requires((e0.Type.IsIntegerType && e1.Type.IsIntegerType) || (e0.Type.IsRealType && e1.Type.IsRealType)); + Contract.Requires(e1.Type != null); + Contract.Requires( + (e0.Type.IsNumericBased(Type.NumericPersuation.Int) && e1.Type.IsNumericBased(Type.NumericPersuation.Int)) || + (e0.Type.IsNumericBased(Type.NumericPersuation.Real) && e1.Type.IsNumericBased(Type.NumericPersuation.Real))); Contract.Ensures(Contract.Result() != null); var s = new BinaryExpr(e0.tok, BinaryExpr.Opcode.Sub, e0, e1); s.ResolvedOp = BinaryExpr.ResolvedOpcode.Sub; // resolve here @@ -5042,7 +5590,8 @@ /// public static Expression CreateIncrement(Expression e, int n) { Contract.Requires(e != null); - Contract.Requires(e.Type.IsIntegerType); + Contract.Requires(e.Type != null); + Contract.Requires(e.Type.IsNumericBased(Type.NumericPersuation.Int)); Contract.Requires(0 <= n); Contract.Ensures(Contract.Result() != null); if (n == 0) { @@ -5057,7 +5606,7 @@ /// public static Expression CreateDecrement(Expression e, int n) { Contract.Requires(e != null); - 
Contract.Requires(e.Type.IsIntegerType); + Contract.Requires(e.Type.IsNumericBased(Type.NumericPersuation.Int)); Contract.Requires(0 <= n); Contract.Ensures(Contract.Result() != null); if (n == 0) { @@ -5116,7 +5665,7 @@ public static Expression CreateLess(Expression e0, Expression e1) { Contract.Requires(e0 != null); Contract.Requires(e1 != null); - Contract.Requires(e0.Type.IsIntegerType && e1.Type.IsIntegerType); + Contract.Requires(e0.Type.IsNumericBased(Type.NumericPersuation.Int) && e1.Type.IsNumericBased(Type.NumericPersuation.Int)); Contract.Ensures(Contract.Result() != null); var s = new BinaryExpr(e0.tok, BinaryExpr.Opcode.Lt, e0, e1); s.ResolvedOp = BinaryExpr.ResolvedOpcode.Lt; // resolve here @@ -5130,7 +5679,9 @@ public static Expression CreateAtMost(Expression e0, Expression e1) { Contract.Requires(e0 != null); Contract.Requires(e1 != null); - Contract.Requires((e0.Type.IsIntegerType && e1.Type.IsIntegerType) || (e0.Type.IsRealType && e1.Type.IsRealType)); + Contract.Requires( + (e0.Type.IsNumericBased(Type.NumericPersuation.Int) && e1.Type.IsNumericBased(Type.NumericPersuation.Int)) || + (e0.Type.IsNumericBased(Type.NumericPersuation.Real) && e1.Type.IsNumericBased(Type.NumericPersuation.Real))); Contract.Ensures(Contract.Result() != null); var s = new BinaryExpr(e0.tok, BinaryExpr.Opcode.Le, e0, e1); s.ResolvedOp = BinaryExpr.ResolvedOpcode.Le; // resolve here @@ -5337,19 +5888,21 @@ public class StaticReceiverExpr : LiteralExpr { public readonly Type UnresolvedType; + private bool Implicit; - public StaticReceiverExpr(IToken tok, Type t) + public StaticReceiverExpr(IToken tok, Type t, bool isImplicit) : base(tok) { Contract.Requires(tok != null); Contract.Requires(t != null); UnresolvedType = t; + Implicit = isImplicit; } /// /// Constructs a resolved LiteralExpr representing the 'null' literal whose type is "cl" /// parameterized by the type arguments of "cl" itself. 
/// - public StaticReceiverExpr(IToken tok, ClassDecl cl) + public StaticReceiverExpr(IToken tok, ClassDecl cl, bool isImplicit) : base(tok) { Contract.Requires(tok != null); @@ -5357,6 +5910,7 @@ var typeArgs = cl.TypeArgs.ConvertAll(tp => (Type)new UserDefinedType(tp)); Type = new UserDefinedType(tok, cl.Name, cl, typeArgs); UnresolvedType = Type; + Implicit = isImplicit; } /// @@ -5373,7 +5927,7 @@ /// a trait that in turn extends trait "W(g(Y))". If "t" denotes type "C(G)" and "cl" denotes "W", /// then type of the StaticReceiverExpr will be "T(g(f(G)))". /// - public StaticReceiverExpr(IToken tok, UserDefinedType t, ClassDecl cl) + public StaticReceiverExpr(IToken tok, UserDefinedType t, ClassDecl cl, bool isImplicit) : base(tok) { Contract.Requires(tok != null); Contract.Requires(t.ResolvedClass != null); @@ -5387,6 +5941,11 @@ } Type = t; UnresolvedType = Type; + Implicit = isImplicit; + } + + public override bool IsImplicit { + get { return Implicit; } } } @@ -5502,8 +6061,8 @@ Contract.Invariant(cce.NonNullElements(Arguments)); Contract.Invariant(cce.NonNullElements(InferredTypeArgs)); Contract.Invariant( - Ctor == null || - InferredTypeArgs.Count == Ctor.EnclosingDatatype.TypeArgs.Count); + Ctor == null || + InferredTypeArgs.Count == Ctor.EnclosingDatatype.TypeArgs.Count); } public DatatypeValue(IToken tok, string datatypeName, string memberName, [Captured] List arguments) @@ -5543,6 +6102,10 @@ : base(tok) { Contract.Requires(tok != null); } + + public override bool IsImplicit { + get { return true; } + } } public class IdentifierExpr : Expression @@ -5561,6 +6124,16 @@ Contract.Requires(name != null); Name = name; } + /// + /// Constructs a resolved IdentifierExpr. 
+ /// + public IdentifierExpr(IVariable v) + : base(v.Tok) { + Contract.Requires(v != null); + Name = v.Name; + Var = v; + Type = v.Type; + } } /// @@ -5656,10 +6229,12 @@ } public class SetDisplayExpr : DisplayExpression { - public SetDisplayExpr(IToken tok, List elements) + public bool Finite; + public SetDisplayExpr(IToken tok, bool finite, List elements) : base(tok, elements) { Contract.Requires(tok != null); Contract.Requires(cce.NonNullElements(elements)); + Finite = finite; } } @@ -5762,7 +6337,6 @@ } } - public override IEnumerable SubExpressions { get { yield return Obj; } } @@ -5931,10 +6505,10 @@ Function == null || TypeArgumentSubstitutions == null || Contract.ForAll( Function.TypeArgs, - a => TypeArgumentSubstitutions.ContainsKey(a)) && + a => TypeArgumentSubstitutions.ContainsKey(a)) && Contract.ForAll( TypeArgumentSubstitutions.Keys, - a => Function.TypeArgs.Contains(a) || Function.EnclosingClass.TypeArgs.Contains(a))); + a => Function.TypeArgs.Contains(a) || Function.EnclosingClass.TypeArgs.Contains(a))); } public Function Function; // filled in by resolution @@ -6338,14 +6912,17 @@ Contract.Requires(e0 != null); Contract.Requires(e1 != null); this.Op = op; - if (op == Opcode.Exp) { - // The order of operands is reversed so that it can be turned into implication during resolution - this.E0 = e1; - this.E1 = e0; - } else { - this.E0 = e0; - this.E1 = e1; - } + this.E0 = e0; + this.E1 = e1; + } + + /// + /// Returns a resolved binary expression + /// + public BinaryExpr(Boogie.IToken tok, BinaryExpr.ResolvedOpcode rop, Expression e0, Expression e1) + : this(tok, BinaryExpr.ResolvedOp2SyntacticOp(rop), e0, e1) { + ResolvedOp = rop; + Type = Type.Bool; } public override IEnumerable SubExpressions { @@ -6391,7 +6968,7 @@ public readonly Expression Body; public readonly bool Exact; // Exact==true means a regular let expression; Exact==false means an assign-such-that expression public readonly Attributes Attributes; - public List Constraint_Bounds; // 
initialized and filled in by resolver; null for Exact=true and for a ghost statement + public List Constraint_Bounds; // initialized and filled in by resolver; null for Exact=true and for when expression is in a ghost context // invariant Constraint_Bounds == null || Constraint_Bounds.Count == BoundVars.Count; public List Constraint_MissingBounds; // filled in during resolution; remains "null" if Exact==true or if bounds can be found // invariant Constraint_Bounds == null || Constraint_MissingBounds == null; @@ -6478,15 +7055,87 @@ Contract.Invariant(Term != null); } - public readonly Attributes Attributes; + public Attributes Attributes; public abstract class BoundedPool { public virtual bool IsFinite { get { return true; } // most bounds are finite } + public abstract int Preference(); // higher is better + + public static BoundedPool GetBest(List bounds, bool onlyFiniteBounds) { + Contract.Requires(bounds != null); + bounds = CombineIntegerBounds(bounds); + BoundedPool best = null; + foreach (var bound in bounds) { + if (!onlyFiniteBounds || bound.IsFinite) { + if (best == null || bound.Preference() > best.Preference()) { + best = bound; + } + } + } + return best; + } + static List CombineIntegerBounds(List bounds) { + var lowerBounds = new List(); + var upperBounds = new List(); + var others = new List(); + foreach (var b in bounds) { + var ib = b as IntBoundedPool; + if (ib != null && ib.UpperBound == null) { + lowerBounds.Add(ib); + } else if (ib != null && ib.LowerBound == null) { + upperBounds.Add(ib); + } else { + others.Add(b); + } + } + // pair up the bounds + var n = Math.Min(lowerBounds.Count, upperBounds.Count); + for (var i = 0; i < n; i++) { + others.Add(new IntBoundedPool(lowerBounds[i].LowerBound, upperBounds[i].UpperBound)); + } + for (var i = n; i < lowerBounds.Count; i++) { + others.Add(lowerBounds[i]); + } + for (var i = n; i < upperBounds.Count; i++) { + others.Add(upperBounds[i]); + } + return others; + } + } + public class ExactBoundedPool 
: BoundedPool + { + public readonly Expression E; + public ExactBoundedPool(Expression e) { + Contract.Requires(e != null); + E = e; + } + public override int Preference() { + return 20; // the best of all bounds + } } public class BoolBoundedPool : BoundedPool { + public override int Preference() { + return 5; + } + } + public class CharBoundedPool : BoundedPool + { + public override int Preference() { + return 4; + } + } + public class RefBoundedPool : BoundedPool + { + public Type Type; + public RefBoundedPool(Type t) { + Type = t; + } + public override int Preference() { + return 2; + } } public class IntBoundedPool : BoundedPool { @@ -6501,36 +7150,60 @@ return LowerBound != null && UpperBound != null; } } + public override int Preference() { + return 1; + } } public class SetBoundedPool : BoundedPool { public readonly Expression Set; public SetBoundedPool(Expression set) { Set = set; } + public override int Preference() { + return 10; + } } public class SubSetBoundedPool : BoundedPool { public readonly Expression UpperBound; public SubSetBoundedPool(Expression set) { UpperBound = set; } + public override int Preference() { + return 1; + } } public class SuperSetBoundedPool : BoundedPool { public readonly Expression LowerBound; public SuperSetBoundedPool(Expression set) { LowerBound = set; } + public override int Preference() { + return 0; + } + public override bool IsFinite { + get { return false; } + } } public class MapBoundedPool : BoundedPool { public readonly Expression Map; public MapBoundedPool(Expression map) { Map = map; } + public override int Preference() { + return 10; + } } public class SeqBoundedPool : BoundedPool { public readonly Expression Seq; public SeqBoundedPool(Expression seq) { Seq = seq; } + public override int Preference() { + return 10; + } } public class DatatypeBoundedPool : BoundedPool { public readonly DatatypeDecl Decl; public DatatypeBoundedPool(DatatypeDecl d) { Decl = d; } + public override int Preference() { + return 5; + } 
} public List Bounds; // initialized and filled in by resolver @@ -6538,6 +7211,24 @@ public List MissingBounds; // filled in during resolution; remains "null" if bounds can be found // invariant Bounds == null || MissingBounds == null; + public List UncompilableBoundVars() { + var bvs = new List(); + if (MissingBounds != null) { + bvs.AddRange(MissingBounds); + } + if (Bounds != null) { + Contract.Assert(Bounds.Count == BoundVars.Count); + for (int i = 0; i < Bounds.Count; i++) { + var bound = Bounds[i]; + if (bound is RefBoundedPool) { + // yes, this is in principle a bound, but it's not one we'd like to compile + bvs.Add(BoundVars[i]); + } + } + } + return bvs; + } + public ComprehensionExpr(IToken tok, List bvars, Expression range, Expression term, Attributes attrs) : base(tok) { Contract.Requires(tok != null); @@ -6562,13 +7253,16 @@ } public abstract class QuantifierExpr : ComprehensionExpr, TypeParameter.ParentType { + private readonly int UniqueId; public List TypeArgs; private static int currentQuantId = -1; - static int FreshQuantId() - { + + protected abstract BinaryExpr.ResolvedOpcode SplitResolvedOp { get; } + + static int FreshQuantId() { return System.Threading.Interlocked.Increment(ref currentQuantId); } - private readonly int UniqueId; + public string FullName { get { return "q$" + UniqueId; @@ -6595,10 +7289,56 @@ this.TypeArgs = tvars; this.UniqueId = FreshQuantId(); } - public abstract Expression LogicalBody(); - } + private Expression SplitQuantifierToExpression() { + Contract.Requires(SplitQuantifier != null && SplitQuantifier.Any()); + Expression accumulator = SplitQuantifier[0]; + for (int tid = 1; tid < SplitQuantifier.Count; tid++) { + accumulator = new BinaryExpr(Term.tok, SplitResolvedOp, accumulator, SplitQuantifier[tid]); + } + return accumulator; + } + + private List _SplitQuantifier; + public List SplitQuantifier { + get { + return _SplitQuantifier; + } + set { + _SplitQuantifier = value; + SplitQuantifierExpression = 
SplitQuantifierToExpression(); + } + } + + internal Expression SplitQuantifierExpression { get; private set; } + + public virtual Expression LogicalBody(bool bypassSplitQuantifier = false) { + // Don't call this on a quantifier with a Split clause: it's not a real quantifier. The only exception is the Compiler. + Contract.Requires(bypassSplitQuantifier || SplitQuantifier == null); + throw new cce.UnreachableException(); // This body is just here for the "Requires" clause + } + + public override IEnumerable SubExpressions { + get { + if (SplitQuantifier == null) { + foreach (var e in base.SubExpressions) { + yield return e; + } + } else { + foreach (var e in Attributes.SubExpressions(Attributes)) { + yield return e; + } + foreach (var e in SplitQuantifier) { + yield return e; + } + } + } + } + } + public class ForallExpr : QuantifierExpr { + protected override BinaryExpr.ResolvedOpcode SplitResolvedOp { get { return BinaryExpr.ResolvedOpcode.And; } } + public ForallExpr(IToken tok, List bvars, Expression range, Expression term, Attributes attrs) : this(tok, new List(), bvars, range, term, attrs) { Contract.Requires(cce.NonNullElements(bvars)); @@ -6611,7 +7351,7 @@ Contract.Requires(tok != null); Contract.Requires(term != null); } - public override Expression LogicalBody() { + public override Expression LogicalBody(bool bypassSplitQuantifier = false) { if (Range == null) { return Term; } @@ -6623,6 +7363,8 @@ } public class ExistsExpr : QuantifierExpr { + protected override BinaryExpr.ResolvedOpcode SplitResolvedOp { get { return BinaryExpr.ResolvedOpcode.Or; } } + public ExistsExpr(IToken tok, List bvars, Expression range, Expression term, Attributes attrs) : this(tok, new List(), bvars, range, term, attrs) { Contract.Requires(cce.NonNullElements(bvars)); @@ -6635,7 +7377,7 @@ Contract.Requires(tok != null); Contract.Requires(term != null); } - public override Expression LogicalBody() { + public override Expression LogicalBody(bool bypassSplitQuantifier = false) { 
if (Range == null) { return Term; } @@ -6648,9 +7390,10 @@ public class SetComprehension : ComprehensionExpr { + public readonly bool Finite; public readonly bool TermIsImplicit; - public SetComprehension(IToken tok, List bvars, Expression range, Expression term, Attributes attrs) + public SetComprehension(IToken tok, bool finite, List bvars, Expression range, Expression term, Attributes attrs) : base(tok, bvars, range, term ?? new IdentifierExpr(tok, bvars[0].Name), attrs) { Contract.Requires(tok != null); Contract.Requires(cce.NonNullElements(bvars)); @@ -6658,6 +7401,7 @@ Contract.Requires(range != null); TermIsImplicit = term == null; + Finite = finite; } } public class MapComprehension : ComprehensionExpr @@ -6806,8 +7550,8 @@ } public class MatchExpr : Expression { // a MatchExpr is an "extended expression" and is only allowed in certain places - public readonly Expression Source; - public readonly List Cases; + private Expression source; + private List cases; public readonly List MissingCases = new List(); // filled in during resolution public readonly bool UsesOptionalBraces; @@ -6823,15 +7567,32 @@ Contract.Requires(tok != null); Contract.Requires(source != null); Contract.Requires(cce.NonNullElements(cases)); - this.Source = source; - this.Cases = cases; + this.source = source; + this.cases = cases; this.UsesOptionalBraces = usesOptionalBraces; } + public Expression Source { + get { return source; } + } + + public List Cases { + get { return cases; } + } + + // should only be used in desugar in resolve to change the source and cases of the matchexpr + public void UpdateSource(Expression source) { + this.source = source; + } + + public void UpdateCases(List cases) { + this.cases = cases; + } + public override IEnumerable SubExpressions { get { yield return Source; - foreach (var mc in Cases) { + foreach (var mc in cases) { yield return mc.Body; } } @@ -6913,12 +7674,13 @@ public readonly IToken tok; public readonly string Id; public DatatypeCtor Ctor; // 
filled in by resolution - public readonly List Arguments; + public List Arguments; // created by the resolver. + public List CasePatterns; // generated from parsers. It should be converted to List during resolver. Invariant: CasePatterns != null ==> Arguments == null [ContractInvariantMethod] void ObjectInvariant() { Contract.Invariant(tok != null); Contract.Invariant(Id != null); - Contract.Invariant(cce.NonNullElements(Arguments)); + Contract.Invariant(cce.NonNullElements(Arguments) || cce.NonNullElements(CasePatterns)); } public MatchCase(IToken tok, string id, [Captured] List arguments) { @@ -6929,24 +7691,51 @@ this.Id = id; this.Arguments = arguments; } + + public MatchCase(IToken tok, string id, [Captured] List cps) { + Contract.Requires(tok != null); + Contract.Requires(id != null); + Contract.Requires(cce.NonNullElements(cps)); + this.tok = tok; + this.Id = id; + this.CasePatterns = cps; + } } public class MatchCaseExpr : MatchCase { - public readonly Expression Body; + private Expression body; [ContractInvariantMethod] void ObjectInvariant() { - Contract.Invariant(Body != null); + Contract.Invariant(body != null); } public MatchCaseExpr(IToken tok, string id, [Captured] List arguments, Expression body) - : base(tok, id, arguments) - { + : base(tok, id, arguments) { Contract.Requires(tok != null); Contract.Requires(id != null); Contract.Requires(cce.NonNullElements(arguments)); Contract.Requires(body != null); - this.Body = body; + this.body = body; + } + + public MatchCaseExpr(IToken tok, string id, [Captured] List cps, Expression body) + : base(tok, id, cps) + { + Contract.Requires(tok != null); + Contract.Requires(id != null); + Contract.Requires(cce.NonNullElements(cps)); + Contract.Requires(body != null); + this.body = body; + } + + public Expression Body { + get { return body; } + } + + // should only be called by resolve to reset the body of the MatchCaseExpr + public void UpdateBody(Expression body) { + this.body = body; } } @@ -7122,6 +7911,36 @@ 
} } + public class DatatypeUpdateExpr : ConcreteSyntaxExpression + { + public readonly Expression Root; + public readonly List> Updates; + public DatatypeUpdateExpr(IToken tok, Expression root, List> updates) + : base(tok) { + Contract.Requires(tok != null); + Contract.Requires(root != null); + Contract.Requires(updates != null); + Contract.Requires(updates.Count != 0); + Root = root; + Updates = updates; + } + + public override IEnumerable SubExpressions { + get { + if (ResolvedExpression == null) { + yield return Root; + foreach (var update in Updates) { + yield return update.Item3; + } + } else { + foreach (var e in ResolvedExpression.SubExpressions) { + yield return e; + } + } + } + } + } + /// /// An AutoGeneratedExpression is simply a wrapper around an expression. This expression tells the generation of hover text (in the Dafny IDE) @@ -7348,6 +8167,48 @@ public class BottomUpVisitor { + public void Visit(IEnumerable exprs) { + exprs.Iter(Visit); + } + public void Visit(IEnumerable stmts) { + stmts.Iter(Visit); + } + public void Visit(MaybeFreeExpression expr) { + Visit(expr.E); + } + public void Visit(FrameExpression expr) { + Visit(expr.E); + } + public void Visit(IEnumerable exprs) { + exprs.Iter(Visit); + } + public void Visit(IEnumerable exprs) { + exprs.Iter(Visit); + } + public void Visit(ICallable decl) { + if (decl is Function) { + Visit((Function)decl); + } else if (decl is Method) { + Visit((Method)decl); + } + //TODO More? + } + public void Visit(Method method) { + Visit(method.Ens); + Visit(method.Req); + Visit(method.Mod.Expressions); + Visit(method.Decreases.Expressions); + if (method.Body != null) { Visit(method.Body); } + //TODO More? + } + public void Visit(Function function) { + Visit(function.Ens); + Visit(function.Req); + Visit(function.Reads); + Visit(function.Decreases.Expressions); + if (function.Body != null) { Visit(function.Body); } + //TODO More? 
+ } public void Visit(Expression expr) { Contract.Requires(expr != null); // recursively visit all subexpressions and all substatements @@ -7397,6 +8258,48 @@ stmt.SubStatements.Iter(s => Visit(s, st)); } } + public void Visit(IEnumerable exprs, State st) { + exprs.Iter(e => Visit(e, st)); + } + public void Visit(IEnumerable stmts, State st) { + stmts.Iter(e => Visit(e, st)); + } + public void Visit(MaybeFreeExpression expr, State st) { + Visit(expr.E, st); + } + public void Visit(FrameExpression expr, State st) { + Visit(expr.E, st); + } + public void Visit(IEnumerable exprs, State st) { + exprs.Iter(e => Visit(e, st)); + } + public void Visit(IEnumerable exprs, State st) { + exprs.Iter(e => Visit(e, st)); + } + public void Visit(ICallable decl, State st) { + if (decl is Function) { + Visit((Function)decl, st); + } else if (decl is Method) { + Visit((Method)decl, st); + } + //TODO More? + } + public void Visit(Method method, State st) { + Visit(method.Ens, st); + Visit(method.Req, st); + Visit(method.Mod.Expressions, st); + Visit(method.Decreases.Expressions, st); + if (method.Body != null) { Visit(method.Body, st); } + //TODO More? + } + public void Visit(Function function, State st) { + Visit(function.Ens, st); + Visit(function.Req, st); + Visit(function.Reads, st); + Visit(function.Decreases.Expressions, st); + if (function.Body != null) { Visit(function.Body, st); } + //TODO More? + } /// /// Visit one expression proper. This method is invoked before it is invoked on the /// sub-parts (sub-expressions and sub-statements). 
A return value of "true" says to diff -Nru dafny-1.9.5/Source/Dafny/Dafny.atg dafny-1.9.7/Source/Dafny/Dafny.atg --- dafny-1.9.5/Source/Dafny/Dafny.atg 2015-05-11 08:03:26.000000000 +0000 +++ dafny-1.9.7/Source/Dafny/Dafny.atg 2016-06-05 21:11:14.000000000 +0000 @@ -16,17 +16,120 @@ /*--------------------------------------------------------------------------*/ readonly Expression/*!*/ dummyExpr; readonly AssignmentRhs/*!*/ dummyRhs; -readonly FrameExpression/*!*/ dummyFrameExpr; +readonly FrameExpression/*!*/ dummyFrameExpr; readonly Statement/*!*/ dummyStmt; readonly ModuleDecl theModule; readonly BuiltIns theBuiltIns; readonly bool theVerifyThisFile; int anonymousIds = 0; -struct MemberModifiers { +/// +/// Holds the modifiers given for a declaration +/// +/// Not all modifiers are applicable to all kinds of declarations. +/// Errors are given when a modify does not apply. +/// We also record the tokens for the specified modifiers so that +/// they can be used in error messages. +/// +struct DeclModifierData { + public bool IsAbstract; + public IToken AbstractToken; public bool IsGhost; + public IToken GhostToken; public bool IsStatic; + public IToken StaticToken; public bool IsProtected; + public IToken ProtectedToken; + public bool IsExtern; + public IToken ExternToken; + public StringLiteralExpr ExternName; + +} + +// Check that token has not been set, then set it. +public void CheckAndSetToken(ref IToken token) +{ + if (token != null) { + SemErr(t, "Duplicate declaration modifier: " + t.val); + } + token = t; +} + +/// +// A flags type used to tell what declaration modifiers are allowed for a declaration. +/// +[Flags] +enum AllowedDeclModifiers { + None = 0, + Abstract = 1, + Ghost = 2, + + // Means ghost not allowed because already implicitly ghost. + AlreadyGhost = 4, + Static = 8, + Protected = 16, + Extern = 32 +}; + +/// +/// Check the declaration modifiers against those that are allowed. 
+/// +/// The 'allowed' parameter specifies which declaratio modifiers are allowed. +/// The 'declCaption' parameter should be a string describing the kind of declaration. +/// It is used in error messages. +/// Any declaration modifiers that are present but not allowed are cleared. +/// +void CheckDeclModifiers(DeclModifierData dmod, string declCaption, AllowedDeclModifiers allowed) +{ + if (dmod.IsAbstract && ((allowed & AllowedDeclModifiers.Abstract) == 0)) { + SemErr(dmod.AbstractToken, declCaption + " cannot be declared 'abstract'."); + dmod.IsAbstract = false; + } + if (dmod.IsGhost) { + if ((allowed & AllowedDeclModifiers.AlreadyGhost) != 0) { + SemErr(dmod.GhostToken, declCaption + " cannot be declared ghost (they are 'ghost' by default)."); + dmod.IsGhost = false; + } else if ((allowed & AllowedDeclModifiers.Ghost) == 0) { + SemErr(dmod.GhostToken, declCaption + " cannot be declared 'ghost'."); + dmod.IsGhost = false; + } + } + if (dmod.IsStatic && ((allowed & AllowedDeclModifiers.Static) == 0)) { + SemErr(dmod.StaticToken, declCaption + " cannot be declared 'static'."); + dmod.IsStatic = false; + } + if (dmod.IsProtected && ((allowed & AllowedDeclModifiers.Protected) == 0)) { + SemErr(dmod.ProtectedToken, declCaption + " cannot be declared 'protected'."); + dmod.IsProtected = false; + } + if (dmod.IsExtern && ((allowed & AllowedDeclModifiers.Extern) == 0)) { + SemErr(dmod.ExternToken, declCaption + " cannot be declared 'extern'."); + dmod.IsExtern = false; + } +} + +/// +/// Encode an 'extern' declaration modifier as an {:extern name} attribute. +/// +/// We also include an {:axiom} attribute since the specification of an +/// external entity is assumed to hold, but only for methods or functions. 
+/// +static void EncodeExternAsAttribute(DeclModifierData dmod, ref Attributes attrs, IToken/*!*/ id, bool needAxiom) { + if (dmod.IsExtern) { + StringLiteralExpr name = dmod.ExternName; + if (name == null) { + bool isVerbatimString = false; + name = new StringLiteralExpr(id, id.val, isVerbatimString); + } + var args = new List(); + args.Add(name); + attrs = new Attributes("extern", args, attrs); + + // Also 'extern' implies 'axiom' for methods or functions. + if (needAxiom) { + attrs = new Attributes("axiom", new List(), attrs); + } + } } /// @@ -41,11 +144,11 @@ string s; if (filename == "stdin.dfy") { s = Microsoft.Boogie.ParserHelper.Fill(System.Console.In, new List()); - return Parse(s, filename, module, builtIns, errors, verifyThisFile); + return Parse(s, filename, filename, module, builtIns, errors, verifyThisFile); } else { using (System.IO.StreamReader reader = new System.IO.StreamReader(filename)) { s = Microsoft.Boogie.ParserHelper.Fill(reader, new List()); - return Parse(s, DafnyOptions.Clo.UseBaseNameForFileName ? Path.GetFileName(filename) : filename, module, builtIns, errors, verifyThisFile); + return Parse(s, filename, DafnyOptions.Clo.UseBaseNameForFileName ? Path.GetFileName(filename) : filename, module, builtIns, errors, verifyThisFile); } } } @@ -55,12 +158,12 @@ /// Returns the number of parsing errors encountered. /// Note: first initialize the Scanner. 
/// -public static int Parse (string/*!*/ s, string/*!*/ filename, ModuleDecl module, BuiltIns builtIns, bool verifyThisFile=true) { +public static int Parse (string/*!*/ s, string/*!*/ fullFilename, string/*!*/ filename, ModuleDecl module, BuiltIns builtIns, ErrorReporter reporter, bool verifyThisFile=true) { Contract.Requires(s != null); Contract.Requires(filename != null); Contract.Requires(module != null); - Errors errors = new Errors(); - return Parse(s, filename, module, builtIns, errors, verifyThisFile); + Errors errors = new Errors(reporter); + return Parse(s, fullFilename, filename, module, builtIns, errors, verifyThisFile); } /// /// Parses top-level things (modules, classes, datatypes, class members) @@ -68,18 +171,18 @@ /// Returns the number of parsing errors encountered. /// Note: first initialize the Scanner with the given Errors sink. /// -public static int Parse (string/*!*/ s, string/*!*/ filename, ModuleDecl module, BuiltIns builtIns, - Errors/*!*/ errors, bool verifyThisFile=true) { +public static int Parse (string/*!*/ s, string/*!*/ fullFilename, string/*!*/ filename, ModuleDecl module, + BuiltIns builtIns, Errors/*!*/ errors, bool verifyThisFile=true) { Contract.Requires(s != null); Contract.Requires(filename != null); Contract.Requires(module != null); Contract.Requires(errors != null); byte[]/*!*/ buffer = cce.NonNull( UTF8Encoding.Default.GetBytes(s)); MemoryStream ms = new MemoryStream(buffer,false); - Scanner scanner = new Scanner(ms, errors, filename); + Scanner scanner = new Scanner(ms, errors, fullFilename, filename); Parser parser = new Parser(scanner, errors, module, builtIns, verifyThisFile); parser.Parse(); - return parser.errors.count; + return parser.errors.ErrorCount; } public Parser(Scanner/*!*/ scanner, Errors/*!*/ errors, ModuleDecl module, BuiltIns builtIns, bool verifyThisFile=true) : this(scanner, errors) // the real work @@ -104,6 +207,25 @@ return la.kind == _lbrace && x.kind == _case; } +// an existential guard starts 
with an identifier and is then followed by +// * a colon (if the first identifier is given an explicit type), +// * a comma (if there's a list a bound variables and the first one is not given an explicit type), +// * a start-attribute (if there's one bound variable and it is not given an explicit type and there are attributes), or +// * a bored smiley (if there's one bound variable and it is not given an explicit type). +bool IsExistentialGuard() { + scanner.ResetPeek(); + if (la.kind == _ident) { + Token x = scanner.Peek(); + if (x.kind == _colon || x.kind == _comma || x.kind == _boredSmiley) { + return true; + } else if (x.kind == _lbrace) { + x = scanner.Peek(); + return x.kind == _colon; + } + } + return false; +} + bool IsLoopSpec() { return la.kind == _invariant | la.kind == _decreases | la.kind == _modifies; } @@ -175,6 +297,9 @@ bool IsIMapDisplay() { return la.kind == _imap && scanner.Peek().kind == _lbracket; } +bool IsISetDisplay() { + return la.kind == _iset && scanner.Peek().kind == _lbrace; +} bool IsSuffix() { return la.kind == _dot || la.kind == _lbracket || la.kind == _openparen; @@ -296,6 +421,9 @@ return false; } } +/* Returns true if the next thing is of the form: + * "<" Type { "," Type } ">" + */ bool IsTypeList(ref IToken pt) { if (pt.kind != _openAngleBracket) { return false; @@ -303,6 +431,10 @@ pt = scanner.Peek(); return IsTypeSequence(ref pt, _closeAngleBracket); } +/* Returns true if the next thing is of the form: + * Type { "," Type } + * followed by an endBracketKind. 
+ */ bool IsTypeSequence(ref IToken pt, int endBracketKind) { while (true) { if (!IsType(ref pt)) { @@ -334,12 +466,13 @@ return true; case _arrayToken: case _set: + case _iset: case _multiset: case _seq: case _map: case _imap: pt = scanner.Peek(); - return IsTypeList(ref pt); + return pt.kind != _openAngleBracket || IsTypeList(ref pt); case _ident: while (true) { // invariant: next token is an ident @@ -358,12 +491,24 @@ } case _openparen: pt = scanner.Peek(); + if (pt.kind == _closeparen) { + // end of type list + pt = scanner.Peek(); + return true; + } return IsTypeSequence(ref pt, _closeparen); default: return false; } } + +bool IsDefaultImport() { + scanner.ResetPeek(); + Token x = scanner.Peek(); // lookahead token again + return la.val == "default" && x.val != "export"; +} + /*--------------------------------------------------------------------------*/ CHARACTERS letter = "ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz". @@ -416,6 +561,7 @@ object = "object". string = "string". set = "set". + iset = "iset". multiset = "multiset". seq = "seq". map = "map". @@ -439,6 +585,7 @@ comma = ','. verticalbar = '|'. doublecolon = "::". + boredSmiley = ":|". bullet = '\u2022'. dot = '.'. semi = ';'. @@ -479,17 +626,14 @@ /*------------------------------------------------------------------------*/ PRODUCTIONS Dafny -= (. ClassDecl/*!*/ c; DatatypeDecl/*!*/ dt; TopLevelDecl td; IteratorDecl iter; - List membersDefaultClass = new List(); - ModuleDecl submodule; += (. List membersDefaultClass = new List(); // to support multiple files, create a default module only if theModule is null DefaultModuleDecl defaultModule = (DefaultModuleDecl)((LiteralModuleDecl)theModule).ModuleDef; // theModule should be a DefaultModuleDecl (actually, the singular DefaultModuleDecl) - TraitDecl/*!*/ trait; Contract.Assert(defaultModule != null); .) { "include" stringToken (. 
{ - string parsedFile = t.filename; + string parsedFile = scanner.FullFilename; bool isVerbatimString; string includedFile = Util.RemoveParsedStringQuotes(t.val, out isVerbatimString); includedFile = Util.RemoveEscaping(includedFile, isVerbatimString); @@ -503,15 +647,7 @@ } .) } - { SubModuleDecl (. defaultModule.TopLevelDecls.Add(submodule); .) - | ClassDecl (. defaultModule.TopLevelDecls.Add(c); .) - | DatatypeDecl (. defaultModule.TopLevelDecls.Add(dt); .) - | NewtypeDecl (. defaultModule.TopLevelDecls.Add(td); .) - | OtherTypeDecl (. defaultModule.TopLevelDecls.Add(td); .) - | IteratorDecl (. defaultModule.TopLevelDecls.Add(iter); .) - | TraitDecl (. defaultModule.TopLevelDecls.Add(trait); .) - | ClassMemberDecl - } + { TopDecl } (. // find the default class in the default module, then append membersDefaultClass to its member list DefaultClassDecl defaultClass = null; foreach (TopLevelDecl topleveldecl in defaultModule.TopLevelDecls) { @@ -527,44 +663,78 @@ } .) EOF . -SubModuleDecl -= (. ClassDecl/*!*/ c; DatatypeDecl/*!*/ dt; TopLevelDecl td; IteratorDecl iter; - Attributes attrs = null; IToken/*!*/ id; - TraitDecl/*!*/ trait; - List namedModuleDefaultClassMembers = new List();; + +DeclModifier += ( "abstract" (. dmod.IsAbstract = true; CheckAndSetToken(ref dmod.AbstractToken); .) + | "ghost" (. dmod.IsGhost = true; CheckAndSetToken(ref dmod.GhostToken); .) + | "static" (. dmod.IsStatic = true; CheckAndSetToken(ref dmod.StaticToken); .) + | "protected" (. dmod.IsProtected = true; CheckAndSetToken(ref dmod.ProtectedToken); .) + | "extern" (. dmod.IsExtern = true; CheckAndSetToken(ref dmod.ExternToken); .) + [ stringToken (. bool isVerbatimString; + string s = Util.RemoveParsedStringQuotes(t.val, out isVerbatimString); + dmod.ExternName = new StringLiteralExpr(t, s, isVerbatimString); + .) + ] + ) + . + +TopDecl<. ModuleDefinition module, List membersDefaultClass, bool isTopLevel, bool isAbstract .> += (. 
DeclModifierData dmod = new DeclModifierData(); ModuleDecl submodule; + ClassDecl/*!*/ c; DatatypeDecl/*!*/ dt; TopLevelDecl td; IteratorDecl iter; + TraitDecl/*!*/ trait; + .) + { DeclModifier } + ( SubModuleDecl (. module.TopLevelDecls.Add(submodule); .) + | ClassDecl (. module.TopLevelDecls.Add(c); .) + | DatatypeDecl (. module.TopLevelDecls.Add(dt); .) + | NewtypeDecl (. module.TopLevelDecls.Add(td); .) + | OtherTypeDecl (. module.TopLevelDecls.Add(td); .) + | IteratorDecl (. module.TopLevelDecls.Add(iter); .) + | TraitDecl (. module.TopLevelDecls.Add(trait); .) + | ClassMemberDecl + ) . + +SubModuleDecl += (. Attributes attrs = null; IToken/*!*/ id; + List namedModuleDefaultClassMembers = new List();; List idRefined = null, idPath = null, idAssignment = null; ModuleDefinition module; - ModuleDecl sm; submodule = null; // appease compiler - bool isAbstract = false; + bool isAbstract = dmod.IsAbstract; + bool isExclusively = false; bool opened = false; + CheckDeclModifiers(dmod, "Modules", AllowedDeclModifiers.Abstract | AllowedDeclModifiers.Extern); .) - ( [ "abstract" (. isAbstract = true; .) ] - "module" + ( "module" { Attribute } NoUSIdent + (. EncodeExternAsAttribute(dmod, ref attrs, id, /* needAxiom */ false); .) - [ "refines" QualifiedModuleName ] (. module = new ModuleDefinition(id, id.val, isAbstract, false, idRefined == null ? null : idRefined, parent, attrs, false); .) + [ "exclusively" "refines" QualifiedModuleName (. isExclusively = true; .) + | "refines" QualifiedModuleName (. isExclusively = false; .) ] + (. module = new ModuleDefinition(id, id.val, isAbstract, false, isExclusively, idRefined == null ? null : idRefined, parent, attrs, false, this); .) "{" (. module.BodyStartTok = t; .) - { SubModuleDecl (. module.TopLevelDecls.Add(sm); .) - | ClassDecl (. module.TopLevelDecls.Add(c); .) - | TraitDecl (. module.TopLevelDecls.Add(trait); .) - | DatatypeDecl (. module.TopLevelDecls.Add(dt); .) - | NewtypeDecl (. module.TopLevelDecls.Add(td); .) 
- | OtherTypeDecl (. module.TopLevelDecls.Add(td); .) - | IteratorDecl (. module.TopLevelDecls.Add(iter); .) - | ClassMemberDecl - } - "}" (. module.BodyEndTok = t; + { TopDecl} + "}" (. module.BodyEndTok = t; module.TopLevelDecls.Add(new DefaultClassDecl(module, namedModuleDefaultClassMembers)); submodule = new LiteralModuleDecl(module, parent); .) | "import" ["opened" (.opened = true;.)] NoUSIdent + (. EncodeExternAsAttribute(dmod, ref attrs, id, /* needAxiom */ false); .) [ "=" QualifiedModuleName (. submodule = new AliasModuleDecl(idPath, id, parent, opened); .) - | "as" QualifiedModuleName ["default" QualifiedModuleName ] + | "as" QualifiedModuleName [IF(IsDefaultImport()) "default" QualifiedModuleName ] + (. submodule = new ModuleFacadeDecl(idPath, id, parent, idAssignment, opened); + errors.Warning(t, "\"import A as B\" has been deprecated; in the new syntax, it is \"import A:B\""); + .) + | ":" QualifiedModuleName (. submodule = new ModuleFacadeDecl(idPath, id, parent, idAssignment, opened); .) + | "." QualifiedModuleName + (. idPath.Insert(0, id); + submodule = new AliasModuleDecl(idPath, id, parent, opened); + .) ] [ SYNC ";" // This semi-colon used to be required, but it seems silly to have it. @@ -578,6 +748,33 @@ submodule = new AliasModuleDecl(idPath, id, parent, opened); } .) + | (. + bool isDefault = false; + bool includeBody; + IToken exportId; + List exports = new List();; + List extends = new List(); + .) + ["default" (. isDefault = true; .) ] + "export" + NoUSIdent + ["extends" + NoUSIdent(. extends.Add(id.val); .) + {"," NoUSIdent (. extends.Add(id.val); .) } + ] + "{" + NoUSIdent (. includeBody = false; .) + ['+' (. includeBody = true; .)] + (. exports.Add(new ExportSignature(id, includeBody)); .) + { "," + NoUSIdent (. includeBody = false; .) + ['+' (. includeBody = true; .)] + (. exports.Add(new ExportSignature(id, includeBody)); .) + } + "}" + (. + submodule = new ModuleExportDecl(exportId, parent, isDefault, exports, extends); + .) ) . 
@@ -589,7 +786,8 @@ } . -ClassDecl + +ClassDecl = (. Contract.Requires(module != null); Contract.Ensures(Contract.ValueAtReturn(out c) != null); IToken/*!*/ id; @@ -599,18 +797,23 @@ List typeArgs = new List(); List members = new List(); IToken bodyStart; + CheckDeclModifiers(dmodClass, "Classes", AllowedDeclModifiers.Extern); + DeclModifierData dmod; .) SYNC "class" { Attribute } NoUSIdent + (. EncodeExternAsAttribute(dmodClass, ref attrs, id, /* needAxiom */ false); .) [ GenericParameters ] ["extends" Type (. traits.Add(trait); .) {"," Type (. traits.Add(trait); .) } ] "{" (. bodyStart = t; .) - { ClassMemberDecl + { (. dmod = new DeclModifierData(); .) + { DeclModifier } + ClassMemberDecl } "}" (. c = new ClassDecl(id, id.val, module, typeArgs, members, attrs, traits); @@ -619,23 +822,27 @@ .) . - TraitDecl - = (. Contract.Requires(module != null); +TraitDecl + = (. Contract.Requires(module != null); Contract.Ensures(Contract.ValueAtReturn(out trait) != null); + CheckDeclModifiers(dmodIn, "Traits", AllowedDeclModifiers.None); IToken/*!*/ id; Attributes attrs = null; List typeArgs = new List(); //traits should not support type parameters at the moment List members = new List(); IToken bodyStart; + DeclModifierData dmod; .) SYNC "trait" { Attribute } NoUSIdent [ GenericParameters ] - "{" (. bodyStart = t; .) - { ClassMemberDecl - } + "{" (. bodyStart = t; .) + { (. dmod = new DeclModifierData(); .) + { DeclModifier } + ClassMemberDecl + } "}" (. trait = new TraitDecl(id, id.val, module, typeArgs, members, attrs); trait.BodyStartTok = bodyStart; @@ -643,44 +850,33 @@ .) . -ClassMemberDecl<.List mm, bool allowConstructors, bool moduleLevelDecl.> +ClassMemberDecl<. DeclModifierData dmod, List mm, bool allowConstructors, bool moduleLevelDecl, bool isWithinAbstractModule.> = (. Contract.Requires(cce.NonNullElements(mm)); Method/*!*/ m; Function/*!*/ f; - MemberModifiers mmod = new MemberModifiers(); - IToken staticToken = null, protectedToken = null; .) - { "ghost" (. 
mmod.IsGhost = true; .) - | "static" (. mmod.IsStatic = true; staticToken = t; .) - | "protected" (. mmod.IsProtected = true; protectedToken = t; .) - } ( (. if (moduleLevelDecl) { SemErr(la, "fields are not allowed to be declared at the module level; instead, wrap the field in a 'class' declaration"); - mmod.IsStatic = false; - mmod.IsProtected = false; + dmod.IsStatic = false; } .) - FieldDecl + FieldDecl | IF(IsFunctionDecl()) - (. if (moduleLevelDecl && staticToken != null) { - errors.Warning(staticToken, "module-level functions are always non-instance, so the 'static' keyword is not allowed here"); - mmod.IsStatic = false; + (. if (moduleLevelDecl && dmod.StaticToken != null) { + errors.Warning(dmod.StaticToken, "module-level functions are always non-instance, so the 'static' keyword is not allowed here"); + dmod.IsStatic = false; } .) - FunctionDecl (. mm.Add(f); .) - | (. if (moduleLevelDecl && staticToken != null) { - errors.Warning(staticToken, "module-level methods are always non-instance, so the 'static' keyword is not allowed here"); - mmod.IsStatic = false; - } - if (protectedToken != null) { - SemErr(protectedToken, "only functions, not methods, can be declared 'protected'"); - mmod.IsProtected = false; + FunctionDecl (. mm.Add(f); .) + | (. if (moduleLevelDecl && dmod.StaticToken != null) { + errors.Warning(dmod.StaticToken, "module-level methods are always non-instance, so the 'static' keyword is not allowed here"); + dmod.IsStatic = false; } .) - MethodDecl (. mm.Add(m); .) + MethodDecl (. mm.Add(m); .) ) . -DatatypeDecl +DatatypeDecl = (. Contract.Requires(module != null); Contract.Ensures(Contract.ValueAtReturn(out dt)!=null); IToken/*!*/ id; @@ -689,6 +885,7 @@ List ctors = new List(); IToken bodyStart = Token.NoToken; // dummy assignment bool co = false; + CheckDeclModifiers(dmod, "Datatypes or codatatypes", AllowedDeclModifiers.None); .) SYNC ( "datatype" @@ -726,27 +923,27 @@ [ FormalsOptionalIds ] (. 
ctors.Add(new DatatypeCtor(id, id.val, formals, attrs)); .) . -FieldDecl<.MemberModifiers mmod, List/*!*/ mm.> +FieldDecl<.DeclModifierData dmod, List/*!*/ mm.> = (. Contract.Requires(cce.NonNullElements(mm)); Attributes attrs = null; IToken/*!*/ id; Type/*!*/ ty; + CheckDeclModifiers(dmod, "Fields", AllowedDeclModifiers.Ghost); .) SYNC "var" - (. if (mmod.IsStatic) { SemErr(t, "fields cannot be declared 'static'"); } - .) { Attribute } - FIdentType (. mm.Add(new Field(id, id.val, mmod.IsGhost, ty, attrs)); .) - { "," FIdentType (. mm.Add(new Field(id, id.val, mmod.IsGhost, ty, attrs)); .) + FIdentType (. mm.Add(new Field(id, id.val, dmod.IsGhost, ty, attrs)); .) + { "," FIdentType (. mm.Add(new Field(id, id.val, dmod.IsGhost, ty, attrs)); .) } OldSemi . -NewtypeDecl +NewtypeDecl = (. IToken id, bvId; Attributes attrs = null; td = null; Type baseType = null; Expression wh; + CheckDeclModifiers(dmod, "Newtypes", AllowedDeclModifiers.None); .) "newtype" { Attribute } @@ -754,19 +951,20 @@ "=" ( IF(IsIdentColonOrBar()) NoUSIdent - [ ":" Type ] (. if (baseType == null) { baseType = new OperationTypeProxy(true, true, false, false, false); } .) + [ ":" Type ] (. if (baseType == null) { baseType = new OperationTypeProxy(true, true, false, false, false, false); } .) "|" Expression (. td = new NewtypeDecl(theVerifyThisFile ? id : new IncludeToken(id), id.val, module, new BoundVar(bvId, bvId.val, baseType), wh, attrs); .) | Type (. td = new NewtypeDecl(theVerifyThisFile ? id : new IncludeToken(id), id.val, module, baseType, attrs); .) ) . -OtherTypeDecl +OtherTypeDecl = (. IToken id; Attributes attrs = null; var eqSupport = TypeParameter.EqualitySupportValue.Unspecified; var typeArgs = new List(); td = null; Type ty; + CheckDeclModifiers(dmod, "Type aliases", AllowedDeclModifiers.None); .) "type" { Attribute } @@ -862,7 +1060,7 @@ .) . /*------------------------------------------------------------------------*/ -IteratorDecl +IteratorDecl = (. 
Contract.Ensures(Contract.ValueAtReturn(out iter) != null); IToken/*!*/ id; Attributes attrs = null; @@ -884,6 +1082,7 @@ IToken signatureEllipsis = null; IToken bodyStart = Token.NoToken; IToken bodyEnd = Token.NoToken; + CheckDeclModifiers(dmod, "Iterators", AllowedDeclModifiers.None); .) SYNC "iterator" @@ -929,7 +1128,7 @@ ">" . /*------------------------------------------------------------------------*/ -MethodDecl +MethodDecl = (. Contract.Ensures(Contract.ValueAtReturn(out m) !=null); IToken/*!*/ id = Token.NoToken; bool hasName = false; IToken keywordToken; @@ -951,43 +1150,36 @@ IToken signatureEllipsis = null; IToken bodyStart = Token.NoToken; IToken bodyEnd = Token.NoToken; + AllowedDeclModifiers allowed = AllowedDeclModifiers.None; + string caption = ""; .) SYNC - ( "method" - | "lemma" (. isLemma = true; .) - | "colemma" (. isCoLemma = true; .) - | "comethod" (. isCoLemma = true; + ( "method" (. caption = "Methods"; + allowed = AllowedDeclModifiers.Ghost | AllowedDeclModifiers.Static + | AllowedDeclModifiers.Extern; .) + | "lemma" (. isLemma = true; caption = "Lemmas"; + allowed = AllowedDeclModifiers.AlreadyGhost | AllowedDeclModifiers.Static + | AllowedDeclModifiers.Protected; .) + | "colemma" (. isCoLemma = true; caption = "Colemmas"; + allowed = AllowedDeclModifiers.AlreadyGhost | AllowedDeclModifiers.Static + | AllowedDeclModifiers.Protected; .) + | "comethod" (. isCoLemma = true; caption = "Comethods"; + allowed = AllowedDeclModifiers.AlreadyGhost | AllowedDeclModifiers.Static + | AllowedDeclModifiers.Protected; errors.Warning(t, "the 'comethod' keyword has been deprecated; it has been renamed to 'colemma'"); .) - | "inductive" "lemma" (. isIndLemma = true; .) + | "inductive" "lemma" (. isIndLemma = true; caption = "Inductive lemmas"; + allowed = AllowedDeclModifiers.AlreadyGhost | AllowedDeclModifiers.Static;.) | "constructor" (. 
if (allowConstructor) { isConstructor = true; } else { SemErr(t, "constructors are allowed only in classes"); - } + } + caption = "Constructors"; + allowed = AllowedDeclModifiers.None; .) - ) (. keywordToken = t; .) - (. if (isLemma) { - if (mmod.IsGhost) { - SemErr(t, "lemmas cannot be declared 'ghost' (they are automatically 'ghost')"); - } - } else if (isConstructor) { - if (mmod.IsGhost) { - SemErr(t, "constructors cannot be declared 'ghost'"); - } - if (mmod.IsStatic) { - SemErr(t, "constructors cannot be declared 'static'"); - } - } else if (isIndLemma) { - if (mmod.IsGhost) { - SemErr(t, "inductive lemmas cannot be declared 'ghost' (they are automatically 'ghost')"); - } - } else if (isCoLemma) { - if (mmod.IsGhost) { - SemErr(t, "colemmas cannot be declared 'ghost' (they are automatically 'ghost')"); - } - } - .) + ) (. keywordToken = t; + CheckDeclModifiers(dmod, caption, allowed); .) { Attribute } [ NoUSIdent (. hasName = true; .) ] @@ -997,12 +1189,13 @@ SemErr(la, "a method must be given a name (expecting identifier)"); } } + EncodeExternAsAttribute(dmod, ref attrs, id, /* needAxiom */ true); .) ( [ GenericParameters ] - Formals + Formals [ "returns" (. if (isConstructor) { SemErr(t, "constructors cannot have out-parameters"); } .) - Formals + Formals ] | "..." (. signatureEllipsis = t; .) ) @@ -1010,7 +1203,7 @@ [ BlockStmt ] (. 
- if (DafnyOptions.O.DisallowSoundnessCheating && body == null && ens.Count > 0 && !Attributes.Contains(attrs, "axiom") && !Attributes.Contains(attrs, "imported") && !Attributes.Contains(attrs, "decl") && theVerifyThisFile) { + if (!isWithinAbstractModule && DafnyOptions.O.DisallowSoundnessCheating && body == null && ens.Count > 0 && !Attributes.Contains(attrs, "axiom") && !Attributes.Contains(attrs, "imported") && !Attributes.Contains(attrs, "decl") && theVerifyThisFile) { SemErr(t, "a method with an ensures clause must have a body, unless given the :axiom attribute"); } @@ -1019,16 +1212,16 @@ m = new Constructor(tok, hasName ? id.val : "_ctor", typeArgs, ins, req, new Specification(mod, modAttrs), ens, new Specification(dec, decAttrs), body, attrs, signatureEllipsis); } else if (isIndLemma) { - m = new InductiveLemma(tok, id.val, mmod.IsStatic, typeArgs, ins, outs, + m = new InductiveLemma(tok, id.val, dmod.IsStatic, typeArgs, ins, outs, req, new Specification(mod, modAttrs), ens, new Specification(dec, decAttrs), body, attrs, signatureEllipsis); } else if (isCoLemma) { - m = new CoLemma(tok, id.val, mmod.IsStatic, typeArgs, ins, outs, + m = new CoLemma(tok, id.val, dmod.IsStatic, typeArgs, ins, outs, req, new Specification(mod, modAttrs), ens, new Specification(dec, decAttrs), body, attrs, signatureEllipsis); } else if (isLemma) { - m = new Lemma(tok, id.val, mmod.IsStatic, typeArgs, ins, outs, + m = new Lemma(tok, id.val, dmod.IsStatic, typeArgs, ins, outs, req, new Specification(mod, modAttrs), ens, new Specification(dec, decAttrs), body, attrs, signatureEllipsis); } else { - m = new Method(tok, id.val, mmod.IsStatic, mmod.IsGhost, typeArgs, ins, outs, + m = new Method(tok, id.val, dmod.IsStatic, dmod.IsGhost, typeArgs, ins, outs, req, new Specification(mod, modAttrs), ens, new Specification(dec, decAttrs), body, attrs, signatureEllipsis); } m.BodyStartTok = bodyStart; @@ -1139,7 +1332,13 @@ [ GenericInstantiation ] (. 
if (gt.Count > 1) { SemErr("set type expects only one type argument"); } - ty = new SetType(gt.Count == 1 ? gt[0] : null); + ty = new SetType(true, gt.Count == 1 ? gt[0] : null); + .) + | "iset" (. tok = t; gt = new List(); .) + [ GenericInstantiation ] (. if (gt.Count > 1) { + SemErr("set type expects only one type argument"); + } + ty = new SetType(false, gt.Count == 1 ? gt[0] : null); .) | "multiset" (. tok = t; gt = new List(); .) [ GenericInstantiation ] (. if (gt.Count > 1) { @@ -1227,7 +1426,7 @@ ">" . /*------------------------------------------------------------------------*/ -FunctionDecl +FunctionDecl = (. Contract.Ensures(Contract.ValueAtReturn(out f)!=null); Attributes attrs = null; IToken/*!*/ id = Token.NoToken; // to please compiler @@ -1250,7 +1449,13 @@ ( "function" [ "method" (. isFunctionMethod = true; .) ] - (. if (mmod.IsGhost) { SemErr(t, "functions cannot be declared 'ghost' (they are ghost by default)"); } + (. AllowedDeclModifiers allowed = AllowedDeclModifiers.AlreadyGhost | AllowedDeclModifiers.Static | AllowedDeclModifiers.Protected; + string caption = "Functions"; + if (isFunctionMethod) { + allowed |= AllowedDeclModifiers.Extern; + caption = "Function methods"; + } + CheckDeclModifiers(dmod, caption, allowed); .) { Attribute } NoUSIdent @@ -1266,7 +1471,13 @@ | "predicate" (. isPredicate = true; .) [ "method" (. isFunctionMethod = true; .) ] - (. if (mmod.IsGhost) { SemErr(t, "predicates cannot be declared 'ghost' (they are ghost by default)"); } + (. AllowedDeclModifiers allowed = AllowedDeclModifiers.AlreadyGhost | AllowedDeclModifiers.Static | AllowedDeclModifiers.Protected; + string caption = "Predicates"; + if (isFunctionMethod) { + allowed |= AllowedDeclModifiers.Extern; + caption = "Predicate methods"; + } + CheckDeclModifiers(dmod, caption, allowed); .) { Attribute } NoUSIdent @@ -1281,7 +1492,8 @@ /* ----- inductive predicate ----- */ | "inductive" "predicate" (. isIndPredicate = true; .) - (. 
if (mmod.IsGhost) { SemErr(t, "inductive predicates cannot be declared 'ghost' (they are ghost by default)"); } + (. CheckDeclModifiers(dmod, "Inductive predicates", + AllowedDeclModifiers.AlreadyGhost | AllowedDeclModifiers.Static | AllowedDeclModifiers.Protected); .) { Attribute } NoUSIdent @@ -1295,7 +1507,8 @@ /* ----- copredicate ----- */ | "copredicate" (. isCoPredicate = true; .) - (. if (mmod.IsGhost) { SemErr(t, "copredicates cannot be declared 'ghost' (they are ghost by default)"); } + (. CheckDeclModifiers(dmod, "Copredicates", + AllowedDeclModifiers.AlreadyGhost | AllowedDeclModifiers.Static | AllowedDeclModifiers.Protected); .) { Attribute } NoUSIdent @@ -1312,22 +1525,23 @@ { FunctionSpec } [ FunctionBody ] - (. if (DafnyOptions.O.DisallowSoundnessCheating && body == null && ens.Count > 0 && !Attributes.Contains(attrs, "axiom") && !Attributes.Contains(attrs, "imported")) { + (. if (!isWithinAbstractModule && DafnyOptions.O.DisallowSoundnessCheating && body == null && ens.Count > 0 && + !Attributes.Contains(attrs, "axiom") && !Attributes.Contains(attrs, "imported")) { SemErr(t, "a function with an ensures clause must have a body, unless given the :axiom attribute"); } - + EncodeExternAsAttribute(dmod, ref attrs, id, /* needAxiom */ true); IToken tok = theVerifyThisFile ? 
id : new IncludeToken(id); if (isPredicate) { - f = new Predicate(tok, id.val, mmod.IsStatic, mmod.IsProtected, !isFunctionMethod, typeArgs, formals, + f = new Predicate(tok, id.val, dmod.IsStatic, dmod.IsProtected, !isFunctionMethod, typeArgs, formals, reqs, reads, ens, new Specification(decreases, null), body, Predicate.BodyOriginKind.OriginalOrInherited, attrs, signatureEllipsis); } else if (isIndPredicate) { - f = new InductivePredicate(tok, id.val, mmod.IsStatic, mmod.IsProtected, typeArgs, formals, + f = new InductivePredicate(tok, id.val, dmod.IsStatic, dmod.IsProtected, typeArgs, formals, reqs, reads, ens, body, attrs, signatureEllipsis); } else if (isCoPredicate) { - f = new CoPredicate(tok, id.val, mmod.IsStatic, mmod.IsProtected, typeArgs, formals, + f = new CoPredicate(tok, id.val, dmod.IsStatic, dmod.IsProtected, typeArgs, formals, reqs, reads, ens, body, attrs, signatureEllipsis); } else { - f = new Function(tok, id.val, mmod.IsStatic, mmod.IsProtected, !isFunctionMethod, typeArgs, formals, returnType, + f = new Function(tok, id.val, dmod.IsStatic, dmod.IsProtected, !isFunctionMethod, typeArgs, formals, returnType, reqs, reads, ens, new Specification(decreases, null), body, attrs, signatureEllipsis); } f.BodyStartTok = bodyStart; @@ -1582,50 +1796,78 @@ Expression suchThat = null; Attributes attrs = null; IToken endTok; + s = dummyStmt; .) [ "ghost" (. isGhost = true; x = t; .) ] "var" (. if (!isGhost) { x = t; } .) - { Attribute } - LocalIdentTypeOptional (. lhss.Add(d); d.Attributes = attrs; attrs = null; .) - { "," - { Attribute } - LocalIdentTypeOptional (. lhss.Add(d); d.Attributes = attrs; attrs = null; .) - } - [ ":=" (. assignTok = t; .) - Rhs (. rhss.Add(r); .) - { "," Rhs (. rhss.Add(r); .) - } - | { Attribute } - ":|" (. assignTok = t; .) - [ IF(la.kind == _assume) /* an Expression can also begin with an "assume", so this says to resolve it to pick up any "assume" here */ - "assume" (. suchThatAssume = t; .) 
+ ( { Attribute } + LocalIdentTypeOptional (. lhss.Add(d); d.Attributes = attrs; attrs = null; .) + { "," + { Attribute } + LocalIdentTypeOptional (. lhss.Add(d); d.Attributes = attrs; attrs = null; .) + } + [ ":=" (. assignTok = t; .) + Rhs (. rhss.Add(r); .) + { "," Rhs (. rhss.Add(r); .) + } + | { Attribute } + ":|" (. assignTok = t; .) + [ IF(la.kind == _assume) /* an Expression can also begin with an "assume", so this says to resolve it to pick up any "assume" here */ + "assume" (. suchThatAssume = t; .) + ] + Expression ] - Expression - ] - SYNC ";" (. endTok = t; .) - (. ConcreteUpdateStatement update; - if (suchThat != null) { - var ies = new List(); - foreach (var lhs in lhss) { - ies.Add(new IdentifierExpr(lhs.Tok, lhs.Name)); - } - update = new AssignSuchThatStmt(assignTok, endTok, ies, suchThat, suchThatAssume, attrs); - } else if (rhss.Count == 0) { - update = null; - } else { - var ies = new List(); - foreach (var lhs in lhss) { - ies.Add(new AutoGhostIdentifierExpr(lhs.Tok, lhs.Name)); + SYNC ";" (. endTok = t; .) + (. ConcreteUpdateStatement update; + if (suchThat != null) { + var ies = new List(); + foreach (var lhs in lhss) { + ies.Add(new IdentifierExpr(lhs.Tok, lhs.Name)); + } + update = new AssignSuchThatStmt(assignTok, endTok, ies, suchThat, suchThatAssume, attrs); + } else if (rhss.Count == 0) { + update = null; + } else { + var ies = new List(); + foreach (var lhs in lhss) { + ies.Add(new AutoGhostIdentifierExpr(lhs.Tok, lhs.Name)); + } + update = new UpdateStmt(assignTok, endTok, ies, rhss); } - update = new UpdateStmt(assignTok, endTok, ies, rhss); - } - s = new VarDeclStmt(x, endTok, lhss, update); - .) + s = new VarDeclStmt(x, endTok, lhss, update); + .) + | "(" (. var letLHSs = new List(); + var letRHSs = new List(); + List arguments = new List(); + CasePattern pat; + Expression e = dummyExpr; + IToken id = t; + .) + [ CasePattern (. arguments.Add(pat); .) + { "," CasePattern (. arguments.Add(pat); .) + } + ] + ")" (. 
// Parse parenthesis without an identifier as a built in tuple type. + theBuiltIns.TupleType(id, arguments.Count, true); // make sure the tuple type exists + string ctor = BuiltIns.TupleTypeCtorName; //use the TupleTypeCtors + pat = new CasePattern(id, ctor, arguments); + if (isGhost) { pat.Vars.Iter(bv => bv.IsGhost = true); } + letLHSs.Add(pat); + .) + ( ":=" + | { Attribute } + ":|" (. SemErr(pat.tok, "LHS of assign-such-that expression must be variables, not general patterns"); .) + ) + Expression (. letRHSs.Add(e); .) + + ";" + (. s = new LetStmt(e.tok, e.tok, letLHSs, letRHSs); .) + ) . IfStmt = (. Contract.Ensures(Contract.ValueAtReturn(out ifStmt) != null); IToken/*!*/ x; - Expression guard = null; IToken guardEllipsis = null; + Expression guard = null; IToken guardEllipsis = null; bool isExistentialGuard = false; BlockStmt/*!*/ thn; BlockStmt/*!*/ bs; Statement/*!*/ s; @@ -1637,11 +1879,13 @@ "if" (. x = t; .) ( IF(IsAlternative()) - AlternativeBlock + AlternativeBlock (. ifStmt = new AlternativeStmt(x, endTok, alternatives); .) | - ( Guard - | "..." (. guardEllipsis = t; .) + ( IF(IsExistentialGuard()) + ExistentialGuard (. isExistentialGuard = true; .) + | Guard + | "..." (. guardEllipsis = t; .) ) BlockStmt (. endTok = thn.EndTok; .) [ "else" @@ -1650,26 +1894,29 @@ ) ] (. if (guardEllipsis != null) { - ifStmt = new SkeletonStatement(new IfStmt(x, endTok, guard, thn, els), guardEllipsis, null); + ifStmt = new SkeletonStatement(new IfStmt(x, endTok, isExistentialGuard, guard, thn, els), guardEllipsis, null); } else { - ifStmt = new IfStmt(x, endTok, guard, thn, els); + ifStmt = new IfStmt(x, endTok, isExistentialGuard, guard, thn, els); } .) ) . -AlternativeBlock<.out List alternatives, out IToken endTok.> +AlternativeBlock<.bool allowExistentialGuards, out List alternatives, out IToken endTok.> = (. alternatives = new List(); IToken x; - Expression e; + Expression e; bool isExistentialGuard; List body; .) "{" - { "case" (. x = t; .) 
- Expression // NB: don't allow lambda here + { "case" (. x = t; isExistentialGuard = false; e = dummyExpr; .) + ( IF(allowExistentialGuards && IsExistentialGuard()) + ExistentialGuard (. isExistentialGuard = true; .) // NB: don't allow lambda here + | Expression // NB: don't allow lambda here + ) "=>" (. body = new List(); .) { Stmt } - (. alternatives.Add(new GuardedAlternative(x, e, body)); .) + (. alternatives.Add(new GuardedAlternative(x, isExistentialGuard, e, body)); .) } "}" (. endTok = t; .) . @@ -1693,7 +1940,7 @@ ( IF(IsLoopSpec() || IsAlternative()) { LoopSpec } - AlternativeBlock + AlternativeBlock (. stmt = new AlternativeLoopStmt(x, endTok, invariants, new Specification(decreases, decAttrs), new Specification(mod, modAttrs), alternatives); .) | ( Guard (. Contract.Assume(guard == null || cce.Owner.None(guard)); .) @@ -1773,6 +2020,21 @@ | Expression (. e = ee; .) ) . +ExistentialGuard += (. var bvars = new List(); + BoundVar bv; IToken x; + Attributes attrs = null; + Expression body; + .) + IdentTypeOptional (. bvars.Add(bv); x = bv.tok; .) + { "," + IdentTypeOptional (. bvars.Add(bv); .) + } + { Attribute } + ":|" + Expression + (. e = new ExistsExpr(x, bvars, null, body, attrs); .) + . MatchStmt = (. Contract.Ensures(Contract.ValueAtReturn(out s) != null); Token x; Expression/*!*/ e; MatchCaseStmt/*!*/ c; @@ -1794,17 +2056,25 @@ CaseStatement = (. Contract.Ensures(Contract.ValueAtReturn(out c) != null); IToken/*!*/ x, id; - List arguments = new List(); - BoundVar/*!*/ bv; + List arguments = new List(); + CasePattern/*!*/ pat; List body = new List(); + string/*!*/ name = ""; .) "case" (. x = t; .) - Ident - [ "(" - IdentTypeOptional (. arguments.Add(bv); .) - { "," IdentTypeOptional (. arguments.Add(bv); .) - } - ")" ] + ( Ident (. name = id.val; .) + [ "(" + [ CasePattern (. arguments.Add(pat); .) + { "," CasePattern (. arguments.Add(pat); .) + } + ] + ")" ] + | "(" + CasePattern (. arguments.Add(pat); .) + { "," CasePattern (. 
arguments.Add(pat); .) + } + ")" + ) "=>" SYNC /* this SYNC and the one inside the loop below are used to avoid problems with the IsNotEndOfCase test. The SYNC will * skip until the next symbol that can legally occur here, which is either the beginning of a Stmt or whatever is allowed @@ -1814,7 +2084,7 @@ Stmt SYNC /* see comment about SYNC above */ } - (. c = new MatchCaseStmt(x, id.val, arguments, body); .) + (. c = new MatchCaseStmt(x, name, arguments, body); .) . /*------------------------------------------------------------------------*/ AssertStmt @@ -1947,6 +2217,7 @@ CalcStmt = (. Contract.Ensures(Contract.ValueAtReturn(out s) != null); Token x; + Attributes attrs = null; CalcStmt.CalcOp op, calcOp = Microsoft.Dafny.CalcStmt.DefaultOp, resOp = Microsoft.Dafny.CalcStmt.DefaultOp; var lines = new List(); var hints = new List(); @@ -1958,6 +2229,7 @@ IToken danglingOperator = null; .) "calc" (. x = t; .) + { IF(IsAttribute()) Attribute } [ CalcOp (. maybeOp = calcOp.ResultOp(calcOp); // guard against non-transitive calcOp (like !=) if (maybeOp == null) { SemErr(opTok, "the main operator of a calculation must be transitive"); @@ -2006,7 +2278,7 @@ // Repeat the last line to create a dummy line for the dangling hint lines.Add(lines[lines.Count - 1]); } - s = new CalcStmt(x, t, calcOp, lines, hints, stepOps, resOp); + s = new CalcStmt(x, t, calcOp, lines, hints, stepOps, resOp, attrs); .) . CalcOp @@ -2100,10 +2372,13 @@ ( ImpliesOp (. x = t; .) ImpliesExpression (. e0 = new BinaryExpr(x, BinaryExpr.Opcode.Imp, e0, e1); .) | ExpliesOp (. x = t; .) - LogicalExpression (. e0 = new BinaryExpr(x, BinaryExpr.Opcode.Exp, e0, e1); .) + LogicalExpression (. // The order of operands is reversed so that it can be turned into implication during resolution + e0 = new BinaryExpr(x, BinaryExpr.Opcode.Exp, e1, e0); .) { IF(IsExpliesOp()) /* read a reverse implication as far as possible */ ExpliesOp (. x = t; .) - LogicalExpression (. 
e0 = new BinaryExpr(x, BinaryExpr.Opcode.Exp, e0, e1); .) + LogicalExpression (. //The order of operands is reversed so that it can be turned into implication during resolution + e0 = new BinaryExpr(x, BinaryExpr.Opcode.Exp, e1, e0); + .) } ) ] @@ -2317,10 +2592,13 @@ "imap" (. x = t; .) MapDisplayExpr { IF(IsSuffix()) Suffix } + | IF(IsISetDisplay()) /* this alternative must be checked before going into EndlessExpression, where there is another "iset" */ + "iset" (. x = t; .) + ISetDisplayExpr + { IF(IsSuffix()) Suffix } | IF(IsLambda(allowLambda)) LambdaExpression /* this is an endless expression */ | EndlessExpression - | NameSegment { IF(IsSuffix()) Suffix } | DisplayExpr @@ -2425,13 +2703,22 @@ } .) . +ISetDisplayExpr += (. Contract.Ensures(Contract.ValueAtReturn(out e) != null); + List elements = new List();; + e = dummyExpr; + .) + "{" + [ Expressions ] (. e = new SetDisplayExpr(setToken, finite, elements);.) + "}" + . DisplayExpr = (. Contract.Ensures(Contract.ValueAtReturn(out e) != null); IToken x; List elements; e = dummyExpr; .) ( "{" (. x = t; elements = new List(); .) - [ Expressions ] (. e = new SetDisplayExpr(x, elements);.) + [ Expressions ] (. e = new SetDisplayExpr(x, true, elements);.) "}" | "[" (. x = t; elements = new List(); .) [ Expressions ] (. e = new SeqDisplayExpr(x, elements); .) @@ -2496,7 +2783,10 @@ "else" Expression (. e = new ITEExpr(x, e, e0, e1); .) | MatchExpression | QuantifierGuts - | SetComprehensionExpr + | "set" (. x = t; .) + SetComprehensionExpr + | "iset" (. x = t; .) + SetComprehensionExpr | StmtInExpr Expression (. e = new StmtExpr(s.Tok, s, e); .) | LetExpr @@ -2584,19 +2874,27 @@ . CaseExpression = (. Contract.Ensures(Contract.ValueAtReturn(out c) != null); IToken/*!*/ x, id; - List arguments = new List(); - BoundVar/*!*/ bv; + List arguments = new List(); + CasePattern/*!*/ pat; Expression/*!*/ body; + string/*!*/ name = ""; .) "case" (. x = t; .) - Ident - [ "(" - IdentTypeOptional (. arguments.Add(bv); .) 
- { "," IdentTypeOptional (. arguments.Add(bv); .) - } - ")" ] + ( Ident (. name = id.val; .) + [ "(" + [ CasePattern (. arguments.Add(pat); .) + { "," CasePattern (. arguments.Add(pat); .) + } + ] + ")" ] + | "(" + CasePattern (. arguments.Add(pat); .) + { "," CasePattern (. arguments.Add(pat); .) + } + ")" + ) "=>" - Expression (. c = new MatchCaseExpr(x, id.val, arguments, body); .) + Expression (. c = new MatchCaseExpr(x, name, arguments, body); .) . CasePattern = (. IToken id; List arguments; @@ -2611,7 +2909,18 @@ } ] ")" (. pat = new CasePattern(id, id.val, arguments); .) - + | "(" (. id = t; + arguments = new List(); + .) + [ CasePattern (. arguments.Add(pat); .) + { "," CasePattern (. arguments.Add(pat); .) + } + ] + ")" (. // Parse parenthesis without an identifier as a built in tuple type. + theBuiltIns.TupleType(id, arguments.Count, true); // make sure the tuple type exists + string ctor = BuiltIns.TupleTypeCtorName; //use the TupleTypeCtors + pat = new CasePattern(id, ctor, arguments); + .) | IdentTypeOptional (. // This could be a BoundVar of a parameter-less constructor and we may not know until resolution. // Nevertheless, we do put the "bv" into the CasePattern here (even though it will get thrown out // later if resolution finds the CasePattern to denote a parameter-less constructor), because this @@ -2683,8 +2992,17 @@ Expression e0 = null; Expression e1 = null; Expression ee; bool anyDots = false; List multipleLengths = null; bool takeRest = false; // takeRest is relevant only if multipleLengths is non-null List multipleIndices = null; + List> updates; + Expression v; .) - ( DotSuffix (. if (x != null) { + ( "." + ( "(" (. x = t; updates = new List>(); .) + MemberBindingUpdate (. updates.Add(Tuple.Create(id, id.val, v)); .) + { "," MemberBindingUpdate (. updates.Add(Tuple.Create(id, id.val, v)); .) + } + ")" + (. e = new DatatypeUpdateExpr(x, e, updates); .) + | DotSuffix (. 
if (x != null) { // process id as a Suffix in its own right e = new ExprDotName(id, e, id.val, null); id = x; // move to the next Suffix @@ -2693,17 +3011,18 @@ .) - ( IF(IsGenericInstantiation()) - (. typeArgs = new List(); .) - GenericInstantiation - | HashCall - | /* empty */ + ( IF(IsGenericInstantiation()) + (. typeArgs = new List(); .) + GenericInstantiation + | HashCall + | /* empty */ + ) + (. e = new ExprDotName(id, e, id.val, typeArgs); + if (openParen != null) { + e = new ApplySuffix(openParen, e, args); + } + .) ) - (. e = new ExprDotName(id, e, id.val, typeArgs); - if (openParen != null) { - e = new ApplySuffix(openParen, e, args); - } - .) | "[" (. x = t; .) ( Expression (. e0 = ee; .) ( ".." (. anyDots = true; .) @@ -2817,16 +3136,14 @@ ] . -SetComprehensionExpr +SetComprehensionExpr = (. Contract.Ensures(Contract.ValueAtReturn(out q) != null); - IToken x = Token.NoToken; BoundVar bv; List bvars = new List(); Expression range; Expression body = null; Attributes attrs = null; .) - "set" (. x = t; .) IdentTypeOptional (. bvars.Add(bv); .) { "," IdentTypeOptional (. bvars.Add(bv); .) @@ -2838,7 +3155,7 @@ Expression ] (. if (body == null && bvars.Count != 1) { SemErr(t, "a set comprehension with more than one bound variable must have a term expression"); } - q = new SetComprehension(x, bvars, range, body, attrs); + q = new SetComprehension(setToken, finite, bvars, range, body, attrs); .) . Expressions<.List args.> @@ -2849,10 +3166,10 @@ . /*------------------------------------------------------------------------*/ Attribute -= (. string name; += (. IToken x; string name; var args = new List(); .) - "{" ":" ident (. name = t.val; .) + "{" ":" NoUSIdent (. name = x.val; .) [ Expressions ] "}" (. attrs = new Attributes(name, args, attrs); .) @@ -2863,10 +3180,10 @@ ident (. x = t; .) . // Identifier or sequence of digits -// Parse one of the following: -// . ident -// . digits -// . digits . 
digits +// Parse one of the following, which are supposed to follow a ".": +// ident +// digits +// digits . digits // In the first two cases, x returns as the token for the ident/digits and y returns as null. // In the third case, x and y return as the tokens for the first and second digits. // This parser production solves a problem where the scanner might parse a real number instead @@ -2876,7 +3193,6 @@ x = Token.NoToken; y = null; .) - "." ( ident (. x = t; .) | digits (. x = t; .) | decimaldigits (. x = t; @@ -2902,6 +3218,15 @@ | "reads" (. x = t; .) ) . +MemberBindingUpdate += (. id = Token.NoToken; e = dummyExpr; .) + ( ident (. id = t; .) + | digits (. id = t; .) + ) + ":=" + Expression + . + // Identifier, disallowing leading underscores NoUSIdent = (. Contract.Ensures(Contract.ValueAtReturn(out x) != null); .) @@ -2933,7 +3258,7 @@ ( digits (. S = Util.RemoveUnderscores(t.val); try { - n = BigInteger.Parse(S); + n = BigIntegerParser.Parse(S); } catch (System.FormatException) { SemErr("incorrectly formatted number"); n = BigInteger.Zero; @@ -2943,7 +3268,7 @@ (. 
S = Util.RemoveUnderscores(t.val.Substring(2)); try { // note: leading 0 required when parsing positive hex numbers - n = BigInteger.Parse("0" + S, System.Globalization.NumberStyles.HexNumber); + n = BigIntegerParser.Parse("0" + S, System.Globalization.NumberStyles.HexNumber); } catch (System.FormatException) { SemErr("incorrectly formatted number"); n = BigInteger.Zero; diff -Nru dafny-1.9.5/Source/Dafny/DafnyMain.cs dafny-1.9.7/Source/Dafny/DafnyMain.cs --- dafny-1.9.5/Source/Dafny/DafnyMain.cs 2015-05-11 08:03:26.000000000 +0000 +++ dafny-1.9.7/Source/Dafny/DafnyMain.cs 2016-06-05 21:11:14.000000000 +0000 @@ -12,7 +12,7 @@ namespace Microsoft.Dafny { public class Main { - private static void MaybePrintProgram(Program program, string filename) + private static void MaybePrintProgram(Program program, string filename, bool afterResolver) { if (filename != null) { TextWriter tw; @@ -22,14 +22,14 @@ tw = new System.IO.StreamWriter(filename); } Printer pr = new Printer(tw, DafnyOptions.O.PrintMode); - pr.PrintProgram(program); + pr.PrintProgram(program, afterResolver); } } /// /// Returns null on success, or an error string otherwise. 
/// - public static string ParseCheck(IList/*!*/ fileNames, string/*!*/ programName, out Program program) + public static string ParseCheck(IList/*!*/ fileNames, string/*!*/ programName, ErrorReporter reporter, out Program program) //modifies Bpl.CommandLineOptions.Clo.XmlSink.*; { Contract.Requires(programName != null); @@ -47,31 +47,31 @@ Console.WriteLine("Parsing " + dafnyFileName); } - string err = ParseFile(dafnyFileName, Bpl.Token.NoToken, module, builtIns, new Errors()); + string err = ParseFile(dafnyFileName, Bpl.Token.NoToken, module, builtIns, new Errors(reporter)); if (err != null) { return err; } } if (!DafnyOptions.O.DisallowIncludes) { - string errString = ParseIncludes(module, builtIns, fileNames, new Errors()); + string errString = ParseIncludes(module, builtIns, fileNames, new Errors(reporter)); if (errString != null) { return errString; } } - program = new Program(programName, module, builtIns); + program = new Program(programName, module, builtIns, reporter); - MaybePrintProgram(program, DafnyOptions.O.DafnyPrintFile); + MaybePrintProgram(program, DafnyOptions.O.DafnyPrintFile, false); if (Bpl.CommandLineOptions.Clo.NoResolve || Bpl.CommandLineOptions.Clo.NoTypecheck) { return null; } Dafny.Resolver r = new Dafny.Resolver(program); r.ResolveProgram(program); - MaybePrintProgram(program, DafnyOptions.O.DafnyPrintResolvedFile); + MaybePrintProgram(program, DafnyOptions.O.DafnyPrintResolvedFile, true); - if (r.ErrorCount != 0) { - return string.Format("{0} resolution/type errors detected in {1}", r.ErrorCount, program.Name); + if (reporter.Count(ErrorLevel.Error) != 0) { + return string.Format("{0} resolution/type errors detected in {1}", reporter.Count(ErrorLevel.Error), program.Name); } return null; // success diff -Nru dafny-1.9.5/Source/Dafny/DafnyOptions.cs dafny-1.9.7/Source/Dafny/DafnyOptions.cs --- dafny-1.9.5/Source/Dafny/DafnyOptions.cs 2015-05-11 08:03:26.000000000 +0000 +++ dafny-1.9.7/Source/Dafny/DafnyOptions.cs 2016-06-05 
21:11:14.000000000 +0000 @@ -9,18 +9,26 @@ { public class DafnyOptions : Bpl.CommandLineOptions { - public DafnyOptions() + private ErrorReporter errorReporter; + + public DafnyOptions(ErrorReporter errorReporter = null) : base("Dafny", "Dafny program verifier") { + this.errorReporter = errorReporter; + SetZ3ExecutableName(); } public override string VersionNumber { get { - return System.Diagnostics.FileVersionInfo.GetVersionInfo(System.Reflection.Assembly.GetExecutingAssembly().Location).FileVersion; + return System.Diagnostics.FileVersionInfo.GetVersionInfo(System.Reflection.Assembly.GetExecutingAssembly().Location).FileVersion +#if ENABLE_IRONDAFNY + + "[IronDafny]" +#endif + ; } } public override string VersionSuffix { get { - return " version " + VersionNumber + ", Copyright (c) 2003-2015, Microsoft."; + return " version " + VersionNumber + ", Copyright (c) 2003-2016, Microsoft."; } } @@ -35,6 +43,7 @@ Bpl.CommandLineOptions.Install(options); } + public bool UnicodeOutput = false; public bool DisallowSoundnessCheating = false; public bool Dafnycc = false; public int Induction = 3; @@ -42,9 +51,10 @@ public string DafnyPrelude = null; public string DafnyPrintFile = null; public enum PrintModes { Everything, NoIncludes, NoGhost }; - public PrintModes PrintMode; + public PrintModes PrintMode = PrintModes.Everything; // Default to printing everything public bool DafnyVerify = true; public string DafnyPrintResolvedFile = null; + public List DafnyPrintExportedViews = new List(); public bool Compile = true; public bool ForceCompile = false; public bool RunAfterCompile = false; @@ -54,6 +64,21 @@ public string AutoReqPrintFile = null; public bool ignoreAutoReq = false; public bool AllowGlobals = false; + public bool CountVerificationErrors = true; + public bool Optimize = false; + public bool AutoTriggers = true; + public bool RewriteFocalPredicates = true; + public bool PrintTooltips = false; + public bool PrintStats = false; + public bool PrintFunctionCallGraph = 
false; + public bool WarnShadowing = false; + public bool IronDafny = +#if ENABLE_IRONDAFNY + true +#else + false +#endif + ; protected override bool ParseOption(string name, Bpl.CommandLineOptionEngine.CommandLineParseState ps) { var args = ps.args; // convenient synonym @@ -96,6 +121,11 @@ DafnyPrintResolvedFile = args[ps.i]; } return true; + case "view": + if (ps.ConfirmArgumentCount(1)) { + DafnyPrintExportedViews = args[ps.i].Split(',').ToList(); + } + return true; case "compile": { int compile = 0; @@ -154,7 +184,7 @@ case "noNLarith": DisableNLarith = true; - this.AddZ3Option("NL_ARITH=false"); + this.AddZ3Option("smt.arith.nl=false"); return true; case "autoReqPrint": @@ -170,6 +200,61 @@ case "allowGlobals": AllowGlobals = true; return true; + + case "stats": + PrintStats = true; + return true; + + case "funcCallGraph": + PrintFunctionCallGraph = true; + return true; + + case "warnShadowing": + WarnShadowing = true; + return true; + + case "countVerificationErrors": { + int countErrors = 1; // defaults to reporting verification errors + if (ps.GetNumericArgument(ref countErrors, 2)) { + CountVerificationErrors = countErrors == 1; + } + return true; + } + + case "printTooltips": + PrintTooltips = true; + return true; + + case "autoTriggers": { + int autoTriggers = 0; + if (ps.GetNumericArgument(ref autoTriggers, 2)) { + AutoTriggers = autoTriggers == 1; + } + return true; + } + + case "rewriteFocalPredicates": { + int rewriteFocalPredicates = 0; + if (ps.GetNumericArgument(ref rewriteFocalPredicates, 2)) { + RewriteFocalPredicates = rewriteFocalPredicates == 1; + } + return true; + } + + case "optimize": { + Optimize = true; + return true; + } + + case "noIronDafny": { + IronDafny = false; + return true; + } + + case "ironDafny": { + IronDafny = true; + return true; + } default: break; @@ -190,6 +275,39 @@ // TODO: provide attribute help here } + + /// + /// Dafny comes with it's own copy of z3, to save new users the trouble of having to install extra 
dependency. + /// For this to work, Dafny makes the Z3ExecutablePath point to the path were Z3 is put by our release script. + /// For developers though (and people getting this from source), it's convenient to be able to run right away, + /// so we vendor a Windows version. + /// + private void SetZ3ExecutableName() { + var platform = (int)System.Environment.OSVersion.Platform; + + // http://www.mono-project.com/docs/faq/technical/ + var isUnix = platform == 4 || platform == 128; + + var z3binName = isUnix ? "z3" : "z3.exe"; + var dafnyBinDir = System.IO.Path.GetDirectoryName(System.Reflection.Assembly.GetExecutingAssembly().Location); + var z3BinDir = System.IO.Path.Combine(dafnyBinDir, "z3", "bin"); + var z3BinPath = System.IO.Path.Combine(z3BinDir, z3binName); + + if (!System.IO.File.Exists(z3BinPath) && !isUnix) { + // This is most likely a Windows user running from source without downloading z3 + // separately; this is ok, since we vendor z3.exe. + z3BinPath = System.IO.Path.Combine(dafnyBinDir, z3binName); + } + + if (!System.IO.File.Exists(z3BinPath) && errorReporter != null) { + var tok = new Bpl.Token(1, 1) { filename = "*** " }; + errorReporter.Warning(MessageSource.Other, tok, "Could not find '{0}' in '{1}'.{2}Downloading and extracting a Z3 distribution to Dafny's 'Binaries' folder would solve this issue; for now, we'll rely on Boogie to find Z3.", + z3binName, z3BinDir, System.Environment.NewLine); + } else { + Z3ExecutablePath = z3BinPath; + } + } + public override void Usage() { Console.WriteLine(@" ---- Dafny options --------------------------------------------------------- @@ -202,6 +320,7 @@ print Dafny program after parsing it (use - as to print to console) /printMode: + Everything is the default. NoIncludes disables printing of {:verify false} methods incorporated via the include mechanism, as well as datatypes and fields included from other files. 
NoGhost disables printing of functions, ghost methods, and proof statements in @@ -209,6 +328,11 @@ /rprint: print Dafny program after resolving it (use - as to print to console) + /view: + print the filtered views of a module after it is resolved (/rprint). + if print before the module is resolved (/dprint), then everthing in the module is printed + if no view is specified, then everything in the module is printed. + /dafnyVerify: 0 - stop after typechecking 1 - continue on to translation, verification, and compilation @@ -237,7 +361,7 @@ 2 - apply induction as requested (by attributes) and also for heuristically chosen quantifiers 3 (default) - apply induction as requested, and for - heuristically chosen quantifiers and ghost methods + heuristically chosen quantifiers and lemmas /inductionHeuristic: 0 - least discriminating induction heuristic (that is, lean toward applying induction more often) @@ -245,17 +369,47 @@ how discriminating they are: 0 < 1 < 2 < (3,4) < 5 < 6 6 (default) - most discriminating /noIncludes Ignore include directives - /noNLarith Reduce Z3's knowledge of non-linear arithmetic (*,/,%). + /noNLarith Reduce Z3's knowledge of non-linear arithmetic (*,/,%). Results in more manual work, but also produces more predictable behavior. /autoReqPrint: Print out requirements that were automatically generated by autoReq. /noAutoReq Ignore autoReq attributes /allowGlobals Allow the implicit class '_default' to contain fields, instance functions, and instance methods. These class members are declared at the module scope, - outside of explicit classes. This command-line option is provided to simply + outside of explicit classes. This command-line option is provided to simplify a transition from the behavior in the language prior to version 1.9.3, from which point onward all functions and methods declared at the module scope are implicitly static and fields declarations are not allowed at the module scope. 
+ /countVerificationErrors: + 0 - If preprocessing succeeds, set exit code to 0 regardless of the number + of verification errors. + 1 (default) - If preprocessing succeeds, set exit code to the number of + verification errors. + /autoTriggers: + 0 - Do not generate {:trigger} annotations for user-level quantifiers. + 1 (default) - Add a {:trigger} to each user-level quantifier. Existing + annotations are preserved. + /rewriteFocalPredicates: + 0 - Don't rewrite predicates in the body of prefix lemmas. + 1 (default) - In the body of prefix lemmas, rewrite any use of a focal predicate + P to P#[_k-1]. + /optimize Produce optimized C# code, meaning: + - selects optimized C# prelude by passing + /define:DAFNY_USE_SYSTEM_COLLECTIONS_IMMUTABLE to csc.exe (requires + System.Collections.Immutable.dll in the source directory to successfully + compile). + - passes /optimize flag to csc.exe. + /stats Print interesting statistics about the Dafny files supplied. + /funcCallGraph Print out the function call graph. Format is: func,mod=callee* + /warnShadowing Emits a warning if the name of a declared variable caused another variable + to be shadowed + /ironDafny Enable experimental features needed to support Ironclad/Ironfleet. Use of + these features may cause your code to become incompatible with future + releases of Dafny. + /noIronDafny Disable Ironclad/Ironfleet features, if enabled by default. + /printTooltips + Dump additional positional information (displayed as mouse-over tooltips by + the VS plugin) to stdout as 'Info' messages. 
"); base.Usage(); // also print the Boogie options } diff -Nru dafny-1.9.5/Source/Dafny/DafnyPipeline.csproj dafny-1.9.7/Source/Dafny/DafnyPipeline.csproj --- dafny-1.9.5/Source/Dafny/DafnyPipeline.csproj 2015-05-11 08:03:26.000000000 +0000 +++ dafny-1.9.7/Source/Dafny/DafnyPipeline.csproj 2016-06-05 21:11:14.000000000 +0000 @@ -1,4 +1,4 @@ - + Debug @@ -8,7 +8,7 @@ {FE44674A-1633-4917-99F4-57635E6FA740} Library Properties - DafnyPipeline + Microsoft.Dafny DafnyPipeline v4.0 512 @@ -41,7 +41,7 @@ full false bin\Debug\ - DEBUG;TRACE + TRACE;DEBUG;NO_ENABLE_IRONDAFNY prompt 4 False @@ -83,7 +83,7 @@ pdbonly true bin\Release\ - TRACE + TRACE;NO_ENABLE_IRONDAFNY prompt 4 AllRules.ruleset @@ -91,7 +91,7 @@ true bin\Checked\ - DEBUG;TRACE + TRACE;DEBUG;NO_ENABLE_IRONDAFNY full AnyCPU prompt @@ -143,8 +143,16 @@ + + + + + + + + @@ -191,4 +199,4 @@ --> - + \ No newline at end of file diff -Nru dafny-1.9.5/Source/Dafny/Makefile dafny-1.9.7/Source/Dafny/Makefile --- dafny-1.9.5/Source/Dafny/Makefile 2015-05-11 08:03:26.000000000 +0000 +++ dafny-1.9.7/Source/Dafny/Makefile 2016-06-05 21:11:14.000000000 +0000 @@ -4,8 +4,8 @@ # from http://boogiepartners.codeplex.com/. Update the FRAME_DIR variable to # point to whatever directory you install that into. 
# ############################################################################### -COCO_EXE_DIR = ..\..\..\boogiepartners\CocoRdownload -FRAME_DIR = ..\..\..\boogiepartners\CocoR\Modified +COCO_EXE_DIR = ..\..\..\boogie-partners\CocoR\bin +FRAME_DIR = ..\..\..\boogie-partners\CocoR\Modified COCO = $(COCO_EXE_DIR)\Coco.exe # "all" depends on 2 files, really (Parser.cs and Scanner.cs), but they diff -Nru dafny-1.9.5/Source/Dafny/Parser.cs dafny-1.9.7/Source/Dafny/Parser.cs --- dafny-1.9.5/Source/Dafny/Parser.cs 2015-05-11 08:03:26.000000000 +0000 +++ dafny-1.9.7/Source/Dafny/Parser.cs 2016-06-05 21:11:14.000000000 +0000 @@ -27,76 +27,181 @@ public const int _object = 11; public const int _string = 12; public const int _set = 13; - public const int _multiset = 14; - public const int _seq = 15; - public const int _map = 16; - public const int _imap = 17; - public const int _charToken = 18; - public const int _stringToken = 19; - public const int _colon = 20; - public const int _comma = 21; - public const int _verticalbar = 22; - public const int _doublecolon = 23; - public const int _bullet = 24; - public const int _dot = 25; - public const int _semi = 26; - public const int _darrow = 27; - public const int _arrow = 28; - public const int _assume = 29; - public const int _calc = 30; - public const int _case = 31; - public const int _then = 32; - public const int _else = 33; - public const int _decreases = 34; - public const int _invariant = 35; - public const int _function = 36; - public const int _predicate = 37; - public const int _inductive = 38; - public const int _lemma = 39; - public const int _copredicate = 40; - public const int _modifies = 41; - public const int _reads = 42; - public const int _requires = 43; - public const int _lbrace = 44; - public const int _rbrace = 45; - public const int _lbracket = 46; - public const int _rbracket = 47; - public const int _openparen = 48; - public const int _closeparen = 49; - public const int _openAngleBracket = 50; - 
public const int _closeAngleBracket = 51; - public const int _eq = 52; - public const int _neq = 53; - public const int _neqAlt = 54; - public const int _star = 55; - public const int _notIn = 56; - public const int _ellipsis = 57; - public const int maxT = 136; + public const int _iset = 14; + public const int _multiset = 15; + public const int _seq = 16; + public const int _map = 17; + public const int _imap = 18; + public const int _charToken = 19; + public const int _stringToken = 20; + public const int _colon = 21; + public const int _comma = 22; + public const int _verticalbar = 23; + public const int _doublecolon = 24; + public const int _boredSmiley = 25; + public const int _bullet = 26; + public const int _dot = 27; + public const int _semi = 28; + public const int _darrow = 29; + public const int _arrow = 30; + public const int _assume = 31; + public const int _calc = 32; + public const int _case = 33; + public const int _then = 34; + public const int _else = 35; + public const int _decreases = 36; + public const int _invariant = 37; + public const int _function = 38; + public const int _predicate = 39; + public const int _inductive = 40; + public const int _lemma = 41; + public const int _copredicate = 42; + public const int _modifies = 43; + public const int _reads = 44; + public const int _requires = 45; + public const int _lbrace = 46; + public const int _rbrace = 47; + public const int _lbracket = 48; + public const int _rbracket = 49; + public const int _openparen = 50; + public const int _closeparen = 51; + public const int _openAngleBracket = 52; + public const int _closeAngleBracket = 53; + public const int _eq = 54; + public const int _neq = 55; + public const int _neqAlt = 56; + public const int _star = 57; + public const int _notIn = 58; + public const int _ellipsis = 59; + public const int maxT = 140; const bool _T = true; const bool _x = false; const int minErrDist = 2; - public Scanner/*!*/ scanner; - public Errors/*!*/ errors; + public 
Scanner scanner; + public Errors errors; - public Token/*!*/ t; // last recognized token - public Token/*!*/ la; // lookahead token + public Token t; // last recognized token + public Token la; // lookahead token int errDist = minErrDist; readonly Expression/*!*/ dummyExpr; readonly AssignmentRhs/*!*/ dummyRhs; -readonly FrameExpression/*!*/ dummyFrameExpr; +readonly FrameExpression/*!*/ dummyFrameExpr; readonly Statement/*!*/ dummyStmt; readonly ModuleDecl theModule; readonly BuiltIns theBuiltIns; readonly bool theVerifyThisFile; int anonymousIds = 0; -struct MemberModifiers { +/// +/// Holds the modifiers given for a declaration +/// +/// Not all modifiers are applicable to all kinds of declarations. +/// Errors are given when a modify does not apply. +/// We also record the tokens for the specified modifiers so that +/// they can be used in error messages. +/// +struct DeclModifierData { + public bool IsAbstract; + public IToken AbstractToken; public bool IsGhost; + public IToken GhostToken; public bool IsStatic; + public IToken StaticToken; public bool IsProtected; + public IToken ProtectedToken; + public bool IsExtern; + public IToken ExternToken; + public StringLiteralExpr ExternName; + +} + +// Check that token has not been set, then set it. +public void CheckAndSetToken(ref IToken token) +{ + if (token != null) { + SemErr(t, "Duplicate declaration modifier: " + t.val); + } + token = t; +} + +/// +// A flags type used to tell what declaration modifiers are allowed for a declaration. +/// +[Flags] +enum AllowedDeclModifiers { + None = 0, + Abstract = 1, + Ghost = 2, + + // Means ghost not allowed because already implicitly ghost. + AlreadyGhost = 4, + Static = 8, + Protected = 16, + Extern = 32 +}; + +/// +/// Check the declaration modifiers against those that are allowed. +/// +/// The 'allowed' parameter specifies which declaratio modifiers are allowed. +/// The 'declCaption' parameter should be a string describing the kind of declaration. 
+/// It is used in error messages. +/// Any declaration modifiers that are present but not allowed are cleared. +/// +void CheckDeclModifiers(DeclModifierData dmod, string declCaption, AllowedDeclModifiers allowed) +{ + if (dmod.IsAbstract && ((allowed & AllowedDeclModifiers.Abstract) == 0)) { + SemErr(dmod.AbstractToken, declCaption + " cannot be declared 'abstract'."); + dmod.IsAbstract = false; + } + if (dmod.IsGhost) { + if ((allowed & AllowedDeclModifiers.AlreadyGhost) != 0) { + SemErr(dmod.GhostToken, declCaption + " cannot be declared ghost (they are 'ghost' by default)."); + dmod.IsGhost = false; + } else if ((allowed & AllowedDeclModifiers.Ghost) == 0) { + SemErr(dmod.GhostToken, declCaption + " cannot be declared 'ghost'."); + dmod.IsGhost = false; + } + } + if (dmod.IsStatic && ((allowed & AllowedDeclModifiers.Static) == 0)) { + SemErr(dmod.StaticToken, declCaption + " cannot be declared 'static'."); + dmod.IsStatic = false; + } + if (dmod.IsProtected && ((allowed & AllowedDeclModifiers.Protected) == 0)) { + SemErr(dmod.ProtectedToken, declCaption + " cannot be declared 'protected'."); + dmod.IsProtected = false; + } + if (dmod.IsExtern && ((allowed & AllowedDeclModifiers.Extern) == 0)) { + SemErr(dmod.ExternToken, declCaption + " cannot be declared 'extern'."); + dmod.IsExtern = false; + } +} + +/// +/// Encode an 'extern' declaration modifier as an {:extern name} attribute. +/// +/// We also include an {:axiom} attribute since the specification of an +/// external entity is assumed to hold, but only for methods or functions. 
+/// +static void EncodeExternAsAttribute(DeclModifierData dmod, ref Attributes attrs, IToken/*!*/ id, bool needAxiom) { + if (dmod.IsExtern) { + StringLiteralExpr name = dmod.ExternName; + if (name == null) { + bool isVerbatimString = false; + name = new StringLiteralExpr(id, id.val, isVerbatimString); + } + var args = new List(); + args.Add(name); + attrs = new Attributes("extern", args, attrs); + + // Also 'extern' implies 'axiom' for methods or functions. + if (needAxiom) { + attrs = new Attributes("axiom", new List(), attrs); + } + } } /// @@ -111,11 +216,11 @@ string s; if (filename == "stdin.dfy") { s = Microsoft.Boogie.ParserHelper.Fill(System.Console.In, new List()); - return Parse(s, filename, module, builtIns, errors, verifyThisFile); + return Parse(s, filename, filename, module, builtIns, errors, verifyThisFile); } else { using (System.IO.StreamReader reader = new System.IO.StreamReader(filename)) { s = Microsoft.Boogie.ParserHelper.Fill(reader, new List()); - return Parse(s, DafnyOptions.Clo.UseBaseNameForFileName ? Path.GetFileName(filename) : filename, module, builtIns, errors, verifyThisFile); + return Parse(s, filename, DafnyOptions.Clo.UseBaseNameForFileName ? Path.GetFileName(filename) : filename, module, builtIns, errors, verifyThisFile); } } } @@ -125,12 +230,12 @@ /// Returns the number of parsing errors encountered. /// Note: first initialize the Scanner. 
/// -public static int Parse (string/*!*/ s, string/*!*/ filename, ModuleDecl module, BuiltIns builtIns, bool verifyThisFile=true) { +public static int Parse (string/*!*/ s, string/*!*/ fullFilename, string/*!*/ filename, ModuleDecl module, BuiltIns builtIns, ErrorReporter reporter, bool verifyThisFile=true) { Contract.Requires(s != null); Contract.Requires(filename != null); Contract.Requires(module != null); - Errors errors = new Errors(); - return Parse(s, filename, module, builtIns, errors, verifyThisFile); + Errors errors = new Errors(reporter); + return Parse(s, fullFilename, filename, module, builtIns, errors, verifyThisFile); } /// /// Parses top-level things (modules, classes, datatypes, class members) @@ -138,18 +243,18 @@ /// Returns the number of parsing errors encountered. /// Note: first initialize the Scanner with the given Errors sink. /// -public static int Parse (string/*!*/ s, string/*!*/ filename, ModuleDecl module, BuiltIns builtIns, - Errors/*!*/ errors, bool verifyThisFile=true) { +public static int Parse (string/*!*/ s, string/*!*/ fullFilename, string/*!*/ filename, ModuleDecl module, + BuiltIns builtIns, Errors/*!*/ errors, bool verifyThisFile=true) { Contract.Requires(s != null); Contract.Requires(filename != null); Contract.Requires(module != null); Contract.Requires(errors != null); byte[]/*!*/ buffer = cce.NonNull( UTF8Encoding.Default.GetBytes(s)); MemoryStream ms = new MemoryStream(buffer,false); - Scanner scanner = new Scanner(ms, errors, filename); + Scanner scanner = new Scanner(ms, errors, fullFilename, filename); Parser parser = new Parser(scanner, errors, module, builtIns, verifyThisFile); parser.Parse(); - return parser.errors.count; + return parser.errors.ErrorCount; } public Parser(Scanner/*!*/ scanner, Errors/*!*/ errors, ModuleDecl module, BuiltIns builtIns, bool verifyThisFile=true) : this(scanner, errors) // the real work @@ -174,6 +279,25 @@ return la.kind == _lbrace && x.kind == _case; } +// an existential guard starts 
with an identifier and is then followed by +// * a colon (if the first identifier is given an explicit type), +// * a comma (if there's a list a bound variables and the first one is not given an explicit type), +// * a start-attribute (if there's one bound variable and it is not given an explicit type and there are attributes), or +// * a bored smiley (if there's one bound variable and it is not given an explicit type). +bool IsExistentialGuard() { + scanner.ResetPeek(); + if (la.kind == _ident) { + Token x = scanner.Peek(); + if (x.kind == _colon || x.kind == _comma || x.kind == _boredSmiley) { + return true; + } else if (x.kind == _lbrace) { + x = scanner.Peek(); + return x.kind == _colon; + } + } + return false; +} + bool IsLoopSpec() { return la.kind == _invariant | la.kind == _decreases | la.kind == _modifies; } @@ -245,6 +369,9 @@ bool IsIMapDisplay() { return la.kind == _imap && scanner.Peek().kind == _lbracket; } +bool IsISetDisplay() { + return la.kind == _iset && scanner.Peek().kind == _lbrace; +} bool IsSuffix() { return la.kind == _dot || la.kind == _lbracket || la.kind == _openparen; @@ -366,6 +493,9 @@ return false; } } +/* Returns true if the next thing is of the form: + * "<" Type { "," Type } ">" + */ bool IsTypeList(ref IToken pt) { if (pt.kind != _openAngleBracket) { return false; @@ -373,6 +503,10 @@ pt = scanner.Peek(); return IsTypeSequence(ref pt, _closeAngleBracket); } +/* Returns true if the next thing is of the form: + * Type { "," Type } + * followed by an endBracketKind. 
+ */ bool IsTypeSequence(ref IToken pt, int endBracketKind) { while (true) { if (!IsType(ref pt)) { @@ -404,12 +538,13 @@ return true; case _arrayToken: case _set: + case _iset: case _multiset: case _seq: case _map: case _imap: pt = scanner.Peek(); - return IsTypeList(ref pt); + return pt.kind != _openAngleBracket || IsTypeList(ref pt); case _ident: while (true) { // invariant: next token is an ident @@ -428,19 +563,31 @@ } case _openparen: pt = scanner.Peek(); + if (pt.kind == _closeparen) { + // end of type list + pt = scanner.Peek(); + return true; + } return IsTypeSequence(ref pt, _closeparen); default: return false; } } + +bool IsDefaultImport() { + scanner.ResetPeek(); + Token x = scanner.Peek(); // lookahead token again + return la.val == "default" && x.val != "export"; +} + /*--------------------------------------------------------------------------*/ - public Parser(Scanner/*!*/ scanner, Errors/*!*/ errors) { + public Parser(Scanner scanner, Errors errors) { this.scanner = scanner; this.errors = errors; - Token/*!*/ tok = new Token(); + Token tok = new Token(); tok.val = ""; this.la = tok; this.t = new Token(); // just to satisfy its non-null constraint @@ -451,13 +598,13 @@ errDist = 0; } - public void SemErr (string/*!*/ msg) { + public void SemErr (string msg) { Contract.Requires(msg != null); if (errDist >= minErrDist) errors.SemErr(t, msg); errDist = 0; } - public void SemErr(IToken/*!*/ tok, string/*!*/ msg) { + public void SemErr(IToken tok, string msg) { Contract.Requires(tok != null); Contract.Requires(msg != null); errors.SemErr(tok, msg); @@ -506,20 +653,17 @@ void Dafny() { - ClassDecl/*!*/ c; DatatypeDecl/*!*/ dt; TopLevelDecl td; IteratorDecl iter; List membersDefaultClass = new List(); - ModuleDecl submodule; // to support multiple files, create a default module only if theModule is null DefaultModuleDecl defaultModule = (DefaultModuleDecl)((LiteralModuleDecl)theModule).ModuleDef; // theModule should be a DefaultModuleDecl (actually, the 
singular DefaultModuleDecl) - TraitDecl/*!*/ trait; Contract.Assert(defaultModule != null); - while (la.kind == 58) { + while (la.kind == 60) { Get(); - Expect(19); + Expect(20); { - string parsedFile = t.filename; + string parsedFile = scanner.FullFilename; bool isVerbatimString; string includedFile = Util.RemoveParsedStringQuotes(t.val, out isVerbatimString); includedFile = Util.RemoveEscaping(includedFile, isVerbatimString); @@ -534,47 +678,7 @@ } while (StartOf(1)) { - switch (la.kind) { - case 59: case 60: case 62: { - SubModuleDecl(defaultModule, out submodule); - defaultModule.TopLevelDecls.Add(submodule); - break; - } - case 67: { - ClassDecl(defaultModule, out c); - defaultModule.TopLevelDecls.Add(c); - break; - } - case 73: case 74: { - DatatypeDecl(defaultModule, out dt); - defaultModule.TopLevelDecls.Add(dt); - break; - } - case 76: { - NewtypeDecl(defaultModule, out td); - defaultModule.TopLevelDecls.Add(td); - break; - } - case 77: { - OtherTypeDecl(defaultModule, out td); - defaultModule.TopLevelDecls.Add(td); - break; - } - case 78: { - IteratorDecl(defaultModule, out iter); - defaultModule.TopLevelDecls.Add(iter); - break; - } - case 69: { - TraitDecl(defaultModule, out trait); - defaultModule.TopLevelDecls.Add(trait); - break; - } - case 36: case 37: case 38: case 39: case 40: case 70: case 71: case 72: case 75: case 81: case 82: case 83: case 84: { - ClassMemberDecl(membersDefaultClass, false, !DafnyOptions.O.AllowGlobals); - break; - } - } + TopDecl(defaultModule, membersDefaultClass, /* isTopLevel */ true, /* isAbstract */ false); } DefaultClassDecl defaultClass = null; foreach (TopLevelDecl topleveldecl in defaultModule.TopLevelDecls) { @@ -591,106 +695,162 @@ Expect(0); } - void SubModuleDecl(ModuleDefinition parent, out ModuleDecl submodule) { + void TopDecl(ModuleDefinition module, List membersDefaultClass, bool isTopLevel, bool isAbstract ) { + DeclModifierData dmod = new DeclModifierData(); ModuleDecl submodule; ClassDecl/*!*/ c; 
DatatypeDecl/*!*/ dt; TopLevelDecl td; IteratorDecl iter; - Attributes attrs = null; IToken/*!*/ id; TraitDecl/*!*/ trait; + + while (StartOf(2)) { + DeclModifier(ref dmod); + } + switch (la.kind) { + case 66: case 69: case 73: case 74: { + SubModuleDecl(dmod, module, out submodule); + module.TopLevelDecls.Add(submodule); + break; + } + case 77: { + ClassDecl(dmod, module, out c); + module.TopLevelDecls.Add(c); + break; + } + case 79: case 80: { + DatatypeDecl(dmod, module, out dt); + module.TopLevelDecls.Add(dt); + break; + } + case 82: { + NewtypeDecl(dmod, module, out td); + module.TopLevelDecls.Add(td); + break; + } + case 83: { + OtherTypeDecl(dmod, module, out td); + module.TopLevelDecls.Add(td); + break; + } + case 84: { + IteratorDecl(dmod, module, out iter); + module.TopLevelDecls.Add(iter); + break; + } + case 78: { + TraitDecl(dmod, module, out trait); + module.TopLevelDecls.Add(trait); + break; + } + case 38: case 39: case 40: case 41: case 42: case 81: case 87: case 88: case 89: case 90: { + ClassMemberDecl(dmod, membersDefaultClass, false, !DafnyOptions.O.AllowGlobals, +!isTopLevel && DafnyOptions.O.IronDafny && isAbstract); + break; + } + default: SynErr(141); break; + } + } + + void DeclModifier(ref DeclModifierData dmod) { + if (la.kind == 61) { + Get(); + dmod.IsAbstract = true; CheckAndSetToken(ref dmod.AbstractToken); + } else if (la.kind == 62) { + Get(); + dmod.IsGhost = true; CheckAndSetToken(ref dmod.GhostToken); + } else if (la.kind == 63) { + Get(); + dmod.IsStatic = true; CheckAndSetToken(ref dmod.StaticToken); + } else if (la.kind == 64) { + Get(); + dmod.IsProtected = true; CheckAndSetToken(ref dmod.ProtectedToken); + } else if (la.kind == 65) { + Get(); + dmod.IsExtern = true; CheckAndSetToken(ref dmod.ExternToken); + if (la.kind == 20) { + Get(); + bool isVerbatimString; + string s = Util.RemoveParsedStringQuotes(t.val, out isVerbatimString); + dmod.ExternName = new StringLiteralExpr(t, s, isVerbatimString); + + } + } else 
SynErr(142); + } + + void SubModuleDecl(DeclModifierData dmod, ModuleDefinition parent, out ModuleDecl submodule) { + Attributes attrs = null; IToken/*!*/ id; List namedModuleDefaultClassMembers = new List();; List idRefined = null, idPath = null, idAssignment = null; ModuleDefinition module; - ModuleDecl sm; submodule = null; // appease compiler - bool isAbstract = false; + bool isAbstract = dmod.IsAbstract; + bool isExclusively = false; bool opened = false; + CheckDeclModifiers(dmod, "Modules", AllowedDeclModifiers.Abstract | AllowedDeclModifiers.Extern); - if (la.kind == 59 || la.kind == 60) { - if (la.kind == 59) { - Get(); - isAbstract = true; - } - Expect(60); - while (la.kind == 44) { + if (la.kind == 66) { + Get(); + while (la.kind == 46) { Attribute(ref attrs); } NoUSIdent(out id); - if (la.kind == 61) { - Get(); - QualifiedModuleName(out idRefined); + EncodeExternAsAttribute(dmod, ref attrs, id, /* needAxiom */ false); + if (la.kind == 67 || la.kind == 68) { + if (la.kind == 67) { + Get(); + Expect(68); + QualifiedModuleName(out idRefined); + isExclusively = true; + } else { + Get(); + QualifiedModuleName(out idRefined); + isExclusively = false; + } } - module = new ModuleDefinition(id, id.val, isAbstract, false, idRefined == null ? null : idRefined, parent, attrs, false); - Expect(44); + module = new ModuleDefinition(id, id.val, isAbstract, false, isExclusively, idRefined == null ? 
null : idRefined, parent, attrs, false, this); + Expect(46); module.BodyStartTok = t; while (StartOf(1)) { - switch (la.kind) { - case 59: case 60: case 62: { - SubModuleDecl(module, out sm); - module.TopLevelDecls.Add(sm); - break; - } - case 67: { - ClassDecl(module, out c); - module.TopLevelDecls.Add(c); - break; - } - case 69: { - TraitDecl(module, out trait); - module.TopLevelDecls.Add(trait); - break; - } - case 73: case 74: { - DatatypeDecl(module, out dt); - module.TopLevelDecls.Add(dt); - break; - } - case 76: { - NewtypeDecl(module, out td); - module.TopLevelDecls.Add(td); - break; - } - case 77: { - OtherTypeDecl(module, out td); - module.TopLevelDecls.Add(td); - break; - } - case 78: { - IteratorDecl(module, out iter); - module.TopLevelDecls.Add(iter); - break; - } - case 36: case 37: case 38: case 39: case 40: case 70: case 71: case 72: case 75: case 81: case 82: case 83: case 84: { - ClassMemberDecl(namedModuleDefaultClassMembers, false, !DafnyOptions.O.AllowGlobals); - break; - } - } + TopDecl(module, namedModuleDefaultClassMembers, /* isTopLevel */ false, isAbstract); } - Expect(45); + Expect(47); module.BodyEndTok = t; module.TopLevelDecls.Add(new DefaultClassDecl(module, namedModuleDefaultClassMembers)); submodule = new LiteralModuleDecl(module, parent); - } else if (la.kind == 62) { + } else if (la.kind == 69) { Get(); - if (la.kind == 63) { + if (la.kind == 70) { Get(); opened = true; } NoUSIdent(out id); - if (la.kind == 64 || la.kind == 65) { - if (la.kind == 64) { + EncodeExternAsAttribute(dmod, ref attrs, id, /* needAxiom */ false); + if (StartOf(3)) { + if (la.kind == 71) { Get(); QualifiedModuleName(out idPath); submodule = new AliasModuleDecl(idPath, id, parent, opened); - } else { + } else if (la.kind == 72) { Get(); QualifiedModuleName(out idPath); - if (la.kind == 66) { - Get(); + if (IsDefaultImport()) { + Expect(73); QualifiedModuleName(out idAssignment); } submodule = new ModuleFacadeDecl(idPath, id, parent, idAssignment, opened); + 
errors.Warning(t, "\"import A as B\" has been deprecated; in the new syntax, it is \"import A:B\""); + + } else if (la.kind == 21) { + Get(); + QualifiedModuleName(out idPath); + submodule = new ModuleFacadeDecl(idPath, id, parent, idAssignment, opened); + } else { + Get(); + QualifiedModuleName(out idPath); + idPath.Insert(0, id); + submodule = new AliasModuleDecl(idPath, id, parent, opened); + } } - if (la.kind == 26) { - while (!(la.kind == 0 || la.kind == 26)) {SynErr(137); Get();} + if (la.kind == 28) { + while (!(la.kind == 0 || la.kind == 28)) {SynErr(143); Get();} Get(); errors.Warning(t, "the semi-colon that used to terminate a sub-module declaration has been deprecated; in the new syntax, just leave off the semi-colon"); } @@ -700,10 +860,54 @@ submodule = new AliasModuleDecl(idPath, id, parent, opened); } - } else SynErr(138); + } else if (la.kind == 73 || la.kind == 74) { + bool isDefault = false; + bool includeBody; + IToken exportId; + List exports = new List();; + List extends = new List(); + + if (la.kind == 73) { + Get(); + isDefault = true; + } + Expect(74); + NoUSIdent(out exportId); + if (la.kind == 75) { + Get(); + NoUSIdent(out id); + extends.Add(id.val); + while (la.kind == 22) { + Get(); + NoUSIdent(out id); + extends.Add(id.val); + } + } + Expect(46); + NoUSIdent(out id); + includeBody = false; + if (la.kind == 76) { + Get(); + includeBody = true; + } + exports.Add(new ExportSignature(id, includeBody)); + while (la.kind == 22) { + Get(); + NoUSIdent(out id); + includeBody = false; + if (la.kind == 76) { + Get(); + includeBody = true; + } + exports.Add(new ExportSignature(id, includeBody)); + } + Expect(47); + submodule = new ModuleExportDecl(exportId, parent, isDefault, exports, extends); + + } else SynErr(144); } - void ClassDecl(ModuleDefinition/*!*/ module, out ClassDecl/*!*/ c) { + void ClassDecl(DeclModifierData dmodClass, ModuleDefinition/*!*/ module, out ClassDecl/*!*/ c) { Contract.Requires(module != null); 
Contract.Ensures(Contract.ValueAtReturn(out c) != null); IToken/*!*/ id; @@ -713,39 +917,46 @@ List typeArgs = new List(); List members = new List(); IToken bodyStart; + CheckDeclModifiers(dmodClass, "Classes", AllowedDeclModifiers.Extern); + DeclModifierData dmod; - while (!(la.kind == 0 || la.kind == 67)) {SynErr(139); Get();} - Expect(67); - while (la.kind == 44) { + while (!(la.kind == 0 || la.kind == 77)) {SynErr(145); Get();} + Expect(77); + while (la.kind == 46) { Attribute(ref attrs); } NoUSIdent(out id); - if (la.kind == 50) { + EncodeExternAsAttribute(dmodClass, ref attrs, id, /* needAxiom */ false); + if (la.kind == 52) { GenericParameters(typeArgs); } - if (la.kind == 68) { + if (la.kind == 75) { Get(); Type(out trait); traits.Add(trait); - while (la.kind == 21) { + while (la.kind == 22) { Get(); Type(out trait); traits.Add(trait); } } - Expect(44); + Expect(46); bodyStart = t; - while (StartOf(2)) { - ClassMemberDecl(members, true, false); + while (StartOf(4)) { + dmod = new DeclModifierData(); + while (StartOf(2)) { + DeclModifier(ref dmod); + } + ClassMemberDecl(dmod, members, true, false, false); } - Expect(45); + Expect(47); c = new ClassDecl(id, id.val, module, typeArgs, members, attrs, traits); c.BodyStartTok = bodyStart; c.BodyEndTok = t; } - void DatatypeDecl(ModuleDefinition/*!*/ module, out DatatypeDecl/*!*/ dt) { + void DatatypeDecl(DeclModifierData dmod, ModuleDefinition/*!*/ module, out DatatypeDecl/*!*/ dt) { Contract.Requires(module != null); Contract.Ensures(Contract.ValueAtReturn(out dt)!=null); IToken/*!*/ id; @@ -754,30 +965,31 @@ List ctors = new List(); IToken bodyStart = Token.NoToken; // dummy assignment bool co = false; + CheckDeclModifiers(dmod, "Datatypes or codatatypes", AllowedDeclModifiers.None); - while (!(la.kind == 0 || la.kind == 73 || la.kind == 74)) {SynErr(140); Get();} - if (la.kind == 73) { + while (!(la.kind == 0 || la.kind == 79 || la.kind == 80)) {SynErr(146); Get();} + if (la.kind == 79) { Get(); - } else if 
(la.kind == 74) { + } else if (la.kind == 80) { Get(); co = true; - } else SynErr(141); - while (la.kind == 44) { + } else SynErr(147); + while (la.kind == 46) { Attribute(ref attrs); } NoUSIdent(out id); - if (la.kind == 50) { + if (la.kind == 52) { GenericParameters(typeArgs); } - Expect(64); + Expect(71); bodyStart = t; DatatypeMemberDecl(ctors); - while (la.kind == 22) { + while (la.kind == 23) { Get(); DatatypeMemberDecl(ctors); } - if (la.kind == 26) { - while (!(la.kind == 0 || la.kind == 26)) {SynErr(142); Get();} + if (la.kind == 28) { + while (!(la.kind == 0 || la.kind == 28)) {SynErr(148); Get();} Get(); errors.Warning(t, "the semi-colon that used to terminate a (co)datatype declaration has been deprecated; in the new syntax, just leave off the semi-colon"); } @@ -791,78 +1003,80 @@ } - void NewtypeDecl(ModuleDefinition module, out TopLevelDecl td) { + void NewtypeDecl(DeclModifierData dmod, ModuleDefinition module, out TopLevelDecl td) { IToken id, bvId; Attributes attrs = null; td = null; Type baseType = null; Expression wh; + CheckDeclModifiers(dmod, "Newtypes", AllowedDeclModifiers.None); - Expect(76); - while (la.kind == 44) { + Expect(82); + while (la.kind == 46) { Attribute(ref attrs); } NoUSIdent(out id); - Expect(64); + Expect(71); if (IsIdentColonOrBar()) { NoUSIdent(out bvId); - if (la.kind == 20) { + if (la.kind == 21) { Get(); Type(out baseType); } - if (baseType == null) { baseType = new OperationTypeProxy(true, true, false, false, false); } - Expect(22); + if (baseType == null) { baseType = new OperationTypeProxy(true, true, false, false, false, false); } + Expect(23); Expression(out wh, false, true); td = new NewtypeDecl(theVerifyThisFile ? id : new IncludeToken(id), id.val, module, new BoundVar(bvId, bvId.val, baseType), wh, attrs); - } else if (StartOf(3)) { + } else if (StartOf(5)) { Type(out baseType); td = new NewtypeDecl(theVerifyThisFile ? 
id : new IncludeToken(id), id.val, module, baseType, attrs); - } else SynErr(143); + } else SynErr(149); } - void OtherTypeDecl(ModuleDefinition module, out TopLevelDecl td) { + void OtherTypeDecl(DeclModifierData dmod, ModuleDefinition module, out TopLevelDecl td) { IToken id; Attributes attrs = null; var eqSupport = TypeParameter.EqualitySupportValue.Unspecified; var typeArgs = new List(); td = null; Type ty; + CheckDeclModifiers(dmod, "Type aliases", AllowedDeclModifiers.None); - Expect(77); - while (la.kind == 44) { + Expect(83); + while (la.kind == 46) { Attribute(ref attrs); } NoUSIdent(out id); - if (la.kind == 48) { + if (la.kind == 50) { Get(); - Expect(52); - Expect(49); + Expect(54); + Expect(51); eqSupport = TypeParameter.EqualitySupportValue.Required; - if (la.kind == 50) { + if (la.kind == 52) { GenericParameters(typeArgs); } - } else if (StartOf(4)) { - if (la.kind == 50) { + } else if (StartOf(6)) { + if (la.kind == 52) { GenericParameters(typeArgs); } - if (la.kind == 64) { + if (la.kind == 71) { Get(); Type(out ty); td = new TypeSynonymDecl(id, id.val, typeArgs, module, ty, attrs); } - } else SynErr(144); + } else SynErr(150); if (td == null) { td = new OpaqueTypeDecl(id, id.val, module, eqSupport, typeArgs, attrs); } - if (la.kind == 26) { - while (!(la.kind == 0 || la.kind == 26)) {SynErr(145); Get();} + if (la.kind == 28) { + while (!(la.kind == 0 || la.kind == 28)) {SynErr(151); Get();} Get(); errors.Warning(t, "the semi-colon that used to terminate an opaque-type declaration has been deprecated; in the new syntax, just leave off the semi-colon"); } } - void IteratorDecl(ModuleDefinition module, out IteratorDecl/*!*/ iter) { + void IteratorDecl(DeclModifierData dmod, ModuleDefinition module, out IteratorDecl/*!*/ iter) { Contract.Ensures(Contract.ValueAtReturn(out iter) != null); IToken/*!*/ id; Attributes attrs = null; @@ -884,20 +1098,21 @@ IToken signatureEllipsis = null; IToken bodyStart = Token.NoToken; IToken bodyEnd = Token.NoToken; + 
CheckDeclModifiers(dmod, "Iterators", AllowedDeclModifiers.None); - while (!(la.kind == 0 || la.kind == 78)) {SynErr(146); Get();} - Expect(78); - while (la.kind == 44) { + while (!(la.kind == 0 || la.kind == 84)) {SynErr(152); Get();} + Expect(84); + while (la.kind == 46) { Attribute(ref attrs); } NoUSIdent(out id); - if (la.kind == 48 || la.kind == 50) { - if (la.kind == 50) { + if (la.kind == 50 || la.kind == 52) { + if (la.kind == 52) { GenericParameters(typeArgs); } Formals(true, true, ins); - if (la.kind == 79 || la.kind == 80) { - if (la.kind == 79) { + if (la.kind == 85 || la.kind == 86) { + if (la.kind == 85) { Get(); } else { Get(); @@ -905,14 +1120,14 @@ } Formals(false, true, outs); } - } else if (la.kind == 57) { + } else if (la.kind == 59) { Get(); signatureEllipsis = t; - } else SynErr(147); - while (StartOf(5)) { + } else SynErr(153); + while (StartOf(7)) { IteratorSpec(reads, mod, decreases, req, ens, yieldReq, yieldEns, ref readsAttrs, ref modAttrs, ref decrAttrs); } - if (la.kind == 44) { + if (la.kind == 46) { BlockStmt(out body, out bodyStart, out bodyEnd); } iter = new IteratorDecl(id, id.val, module, typeArgs, ins, outs, @@ -926,98 +1141,85 @@ } - void TraitDecl(ModuleDefinition/*!*/ module, out TraitDecl/*!*/ trait) { - Contract.Requires(module != null); + void TraitDecl(DeclModifierData dmodIn, ModuleDefinition/*!*/ module, out TraitDecl/*!*/ trait) { + Contract.Requires(module != null); Contract.Ensures(Contract.ValueAtReturn(out trait) != null); + CheckDeclModifiers(dmodIn, "Traits", AllowedDeclModifiers.None); IToken/*!*/ id; Attributes attrs = null; List typeArgs = new List(); //traits should not support type parameters at the moment List members = new List(); IToken bodyStart; + DeclModifierData dmod; - while (!(la.kind == 0 || la.kind == 69)) {SynErr(148); Get();} - Expect(69); - while (la.kind == 44) { + while (!(la.kind == 0 || la.kind == 78)) {SynErr(154); Get();} + Expect(78); + while (la.kind == 46) { Attribute(ref attrs); } 
NoUSIdent(out id); - if (la.kind == 50) { + if (la.kind == 52) { GenericParameters(typeArgs); } - Expect(44); + Expect(46); bodyStart = t; - while (StartOf(2)) { - ClassMemberDecl(members, true, false); + while (StartOf(4)) { + dmod = new DeclModifierData(); + while (StartOf(2)) { + DeclModifier(ref dmod); + } + ClassMemberDecl(dmod, members, true, false, false); } - Expect(45); + Expect(47); trait = new TraitDecl(id, id.val, module, typeArgs, members, attrs); trait.BodyStartTok = bodyStart; trait.BodyEndTok = t; } - void ClassMemberDecl(List mm, bool allowConstructors, bool moduleLevelDecl) { + void ClassMemberDecl(DeclModifierData dmod, List mm, bool allowConstructors, bool moduleLevelDecl, bool isWithinAbstractModule) { Contract.Requires(cce.NonNullElements(mm)); Method/*!*/ m; Function/*!*/ f; - MemberModifiers mmod = new MemberModifiers(); - IToken staticToken = null, protectedToken = null; - while (la.kind == 70 || la.kind == 71 || la.kind == 72) { - if (la.kind == 70) { - Get(); - mmod.IsGhost = true; - } else if (la.kind == 71) { - Get(); - mmod.IsStatic = true; staticToken = t; - } else { - Get(); - mmod.IsProtected = true; protectedToken = t; - } - } - if (la.kind == 75) { + if (la.kind == 81) { if (moduleLevelDecl) { SemErr(la, "fields are not allowed to be declared at the module level; instead, wrap the field in a 'class' declaration"); - mmod.IsStatic = false; - mmod.IsProtected = false; + dmod.IsStatic = false; } - FieldDecl(mmod, mm); + FieldDecl(dmod, mm); } else if (IsFunctionDecl()) { - if (moduleLevelDecl && staticToken != null) { - errors.Warning(staticToken, "module-level functions are always non-instance, so the 'static' keyword is not allowed here"); - mmod.IsStatic = false; + if (moduleLevelDecl && dmod.StaticToken != null) { + errors.Warning(dmod.StaticToken, "module-level functions are always non-instance, so the 'static' keyword is not allowed here"); + dmod.IsStatic = false; } - FunctionDecl(mmod, out f); + FunctionDecl(dmod, 
isWithinAbstractModule, out f); mm.Add(f); - } else if (StartOf(6)) { - if (moduleLevelDecl && staticToken != null) { - errors.Warning(staticToken, "module-level methods are always non-instance, so the 'static' keyword is not allowed here"); - mmod.IsStatic = false; - } - if (protectedToken != null) { - SemErr(protectedToken, "only functions, not methods, can be declared 'protected'"); - mmod.IsProtected = false; + } else if (StartOf(8)) { + if (moduleLevelDecl && dmod.StaticToken != null) { + errors.Warning(dmod.StaticToken, "module-level methods are always non-instance, so the 'static' keyword is not allowed here"); + dmod.IsStatic = false; } - MethodDecl(mmod, allowConstructors, out m); + MethodDecl(dmod, allowConstructors, isWithinAbstractModule, out m); mm.Add(m); - } else SynErr(149); + } else SynErr(155); } void Attribute(ref Attributes attrs) { - string name; + IToken x; string name; var args = new List(); - Expect(44); - Expect(20); - Expect(1); - name = t.val; - if (StartOf(7)) { + Expect(46); + Expect(21); + NoUSIdent(out x); + name = x.val; + if (StartOf(9)) { Expressions(args); } - Expect(45); + Expect(47); attrs = new Attributes(name, args, attrs); } @@ -1035,7 +1237,7 @@ IToken id; ids = new List(); Ident(out id); ids.Add(id); - while (la.kind == 25) { + while (la.kind == 27) { Get(); Ident(out id); ids.Add(id); @@ -1053,29 +1255,29 @@ IToken/*!*/ id; TypeParameter.EqualitySupportValue eqSupport; - Expect(50); + Expect(52); NoUSIdent(out id); eqSupport = TypeParameter.EqualitySupportValue.Unspecified; - if (la.kind == 48) { + if (la.kind == 50) { Get(); - Expect(52); - Expect(49); + Expect(54); + Expect(51); eqSupport = TypeParameter.EqualitySupportValue.Required; } typeArgs.Add(new TypeParameter(id, id.val, eqSupport)); - while (la.kind == 21) { + while (la.kind == 22) { Get(); NoUSIdent(out id); eqSupport = TypeParameter.EqualitySupportValue.Unspecified; - if (la.kind == 48) { + if (la.kind == 50) { Get(); - Expect(52); - Expect(49); + Expect(54); 
+ Expect(51); eqSupport = TypeParameter.EqualitySupportValue.Required; } typeArgs.Add(new TypeParameter(id, id.val, eqSupport)); } - Expect(51); + Expect(53); } void Type(out Type ty) { @@ -1083,29 +1285,28 @@ TypeAndToken(out tok, out ty); } - void FieldDecl(MemberModifiers mmod, List/*!*/ mm) { + void FieldDecl(DeclModifierData dmod, List/*!*/ mm) { Contract.Requires(cce.NonNullElements(mm)); Attributes attrs = null; IToken/*!*/ id; Type/*!*/ ty; + CheckDeclModifiers(dmod, "Fields", AllowedDeclModifiers.Ghost); - while (!(la.kind == 0 || la.kind == 75)) {SynErr(150); Get();} - Expect(75); - if (mmod.IsStatic) { SemErr(t, "fields cannot be declared 'static'"); } - - while (la.kind == 44) { + while (!(la.kind == 0 || la.kind == 81)) {SynErr(156); Get();} + Expect(81); + while (la.kind == 46) { Attribute(ref attrs); } FIdentType(out id, out ty); - mm.Add(new Field(id, id.val, mmod.IsGhost, ty, attrs)); - while (la.kind == 21) { + mm.Add(new Field(id, id.val, dmod.IsGhost, ty, attrs)); + while (la.kind == 22) { Get(); FIdentType(out id, out ty); - mm.Add(new Field(id, id.val, mmod.IsGhost, ty, attrs)); + mm.Add(new Field(id, id.val, dmod.IsGhost, ty, attrs)); } OldSemi(); } - void FunctionDecl(MemberModifiers mmod, out Function/*!*/ f) { + void FunctionDecl(DeclModifierData dmod, bool isWithinAbstractModule, out Function/*!*/ f) { Contract.Ensures(Contract.ValueAtReturn(out f)!=null); Attributes attrs = null; IToken/*!*/ id = Token.NoToken; // to please compiler @@ -1124,129 +1325,144 @@ IToken signatureEllipsis = null; bool missingOpenParen; - if (la.kind == 36) { + if (la.kind == 38) { Get(); - if (la.kind == 81) { + if (la.kind == 87) { Get(); isFunctionMethod = true; } - if (mmod.IsGhost) { SemErr(t, "functions cannot be declared 'ghost' (they are ghost by default)"); } + AllowedDeclModifiers allowed = AllowedDeclModifiers.AlreadyGhost | AllowedDeclModifiers.Static | AllowedDeclModifiers.Protected; + string caption = "Functions"; + if (isFunctionMethod) { + 
allowed |= AllowedDeclModifiers.Extern; + caption = "Function methods"; + } + CheckDeclModifiers(dmod, caption, allowed); - while (la.kind == 44) { + while (la.kind == 46) { Attribute(ref attrs); } NoUSIdent(out id); - if (la.kind == 48 || la.kind == 50) { - if (la.kind == 50) { + if (la.kind == 50 || la.kind == 52) { + if (la.kind == 52) { GenericParameters(typeArgs); } Formals(true, isFunctionMethod, formals); - Expect(20); + Expect(21); Type(out returnType); - } else if (la.kind == 57) { + } else if (la.kind == 59) { Get(); signatureEllipsis = t; - } else SynErr(151); - } else if (la.kind == 37) { + } else SynErr(157); + } else if (la.kind == 39) { Get(); isPredicate = true; - if (la.kind == 81) { + if (la.kind == 87) { Get(); isFunctionMethod = true; } - if (mmod.IsGhost) { SemErr(t, "predicates cannot be declared 'ghost' (they are ghost by default)"); } + AllowedDeclModifiers allowed = AllowedDeclModifiers.AlreadyGhost | AllowedDeclModifiers.Static | AllowedDeclModifiers.Protected; + string caption = "Predicates"; + if (isFunctionMethod) { + allowed |= AllowedDeclModifiers.Extern; + caption = "Predicate methods"; + } + CheckDeclModifiers(dmod, caption, allowed); - while (la.kind == 44) { + while (la.kind == 46) { Attribute(ref attrs); } NoUSIdent(out id); - if (StartOf(8)) { - if (la.kind == 50) { + if (StartOf(10)) { + if (la.kind == 52) { GenericParameters(typeArgs); } missingOpenParen = true; - if (la.kind == 48) { + if (la.kind == 50) { Formals(true, isFunctionMethod, formals); missingOpenParen = false; } if (missingOpenParen) { errors.Warning(t, "with the new support of higher-order functions in Dafny, parentheses-less predicates are no longer supported; in the new syntax, parentheses are required for the declaration and uses of predicates, even if the predicate takes no additional arguments"); } - if (la.kind == 20) { + if (la.kind == 21) { Get(); SemErr(t, "predicates do not have an explicitly declared return type; it is always bool"); } - } else if 
(la.kind == 57) { + } else if (la.kind == 59) { Get(); signatureEllipsis = t; - } else SynErr(152); - } else if (la.kind == 38) { + } else SynErr(158); + } else if (la.kind == 40) { Get(); - Expect(37); + Expect(39); isIndPredicate = true; - if (mmod.IsGhost) { SemErr(t, "inductive predicates cannot be declared 'ghost' (they are ghost by default)"); } + CheckDeclModifiers(dmod, "Inductive predicates", + AllowedDeclModifiers.AlreadyGhost | AllowedDeclModifiers.Static | AllowedDeclModifiers.Protected); - while (la.kind == 44) { + while (la.kind == 46) { Attribute(ref attrs); } NoUSIdent(out id); - if (la.kind == 48 || la.kind == 50) { - if (la.kind == 50) { + if (la.kind == 50 || la.kind == 52) { + if (la.kind == 52) { GenericParameters(typeArgs); } Formals(true, isFunctionMethod, formals); - if (la.kind == 20) { + if (la.kind == 21) { Get(); SemErr(t, "inductive predicates do not have an explicitly declared return type; it is always bool"); } - } else if (la.kind == 57) { + } else if (la.kind == 59) { Get(); signatureEllipsis = t; - } else SynErr(153); - } else if (la.kind == 40) { + } else SynErr(159); + } else if (la.kind == 42) { Get(); isCoPredicate = true; - if (mmod.IsGhost) { SemErr(t, "copredicates cannot be declared 'ghost' (they are ghost by default)"); } + CheckDeclModifiers(dmod, "Copredicates", + AllowedDeclModifiers.AlreadyGhost | AllowedDeclModifiers.Static | AllowedDeclModifiers.Protected); - while (la.kind == 44) { + while (la.kind == 46) { Attribute(ref attrs); } NoUSIdent(out id); - if (la.kind == 48 || la.kind == 50) { - if (la.kind == 50) { + if (la.kind == 50 || la.kind == 52) { + if (la.kind == 52) { GenericParameters(typeArgs); } Formals(true, isFunctionMethod, formals); - if (la.kind == 20) { + if (la.kind == 21) { Get(); SemErr(t, "copredicates do not have an explicitly declared return type; it is always bool"); } - } else if (la.kind == 57) { + } else if (la.kind == 59) { Get(); signatureEllipsis = t; - } else SynErr(154); - } else 
SynErr(155); + } else SynErr(160); + } else SynErr(161); decreases = isIndPredicate || isCoPredicate ? null : new List(); - while (StartOf(9)) { + while (StartOf(11)) { FunctionSpec(reqs, reads, ens, decreases); } - if (la.kind == 44) { + if (la.kind == 46) { FunctionBody(out body, out bodyStart, out bodyEnd); } - if (DafnyOptions.O.DisallowSoundnessCheating && body == null && ens.Count > 0 && !Attributes.Contains(attrs, "axiom") && !Attributes.Contains(attrs, "imported")) { + if (!isWithinAbstractModule && DafnyOptions.O.DisallowSoundnessCheating && body == null && ens.Count > 0 && + !Attributes.Contains(attrs, "axiom") && !Attributes.Contains(attrs, "imported")) { SemErr(t, "a function with an ensures clause must have a body, unless given the :axiom attribute"); } - + EncodeExternAsAttribute(dmod, ref attrs, id, /* needAxiom */ true); IToken tok = theVerifyThisFile ? id : new IncludeToken(id); if (isPredicate) { - f = new Predicate(tok, id.val, mmod.IsStatic, mmod.IsProtected, !isFunctionMethod, typeArgs, formals, + f = new Predicate(tok, id.val, dmod.IsStatic, dmod.IsProtected, !isFunctionMethod, typeArgs, formals, reqs, reads, ens, new Specification(decreases, null), body, Predicate.BodyOriginKind.OriginalOrInherited, attrs, signatureEllipsis); } else if (isIndPredicate) { - f = new InductivePredicate(tok, id.val, mmod.IsStatic, mmod.IsProtected, typeArgs, formals, + f = new InductivePredicate(tok, id.val, dmod.IsStatic, dmod.IsProtected, typeArgs, formals, reqs, reads, ens, body, attrs, signatureEllipsis); } else if (isCoPredicate) { - f = new CoPredicate(tok, id.val, mmod.IsStatic, mmod.IsProtected, typeArgs, formals, + f = new CoPredicate(tok, id.val, dmod.IsStatic, dmod.IsProtected, typeArgs, formals, reqs, reads, ens, body, attrs, signatureEllipsis); } else { - f = new Function(tok, id.val, mmod.IsStatic, mmod.IsProtected, !isFunctionMethod, typeArgs, formals, returnType, + f = new Function(tok, id.val, dmod.IsStatic, dmod.IsProtected, !isFunctionMethod, 
typeArgs, formals, returnType, reqs, reads, ens, new Specification(decreases, null), body, attrs, signatureEllipsis); } f.BodyStartTok = bodyStart; @@ -1259,7 +1475,7 @@ } - void MethodDecl(MemberModifiers mmod, bool allowConstructor, out Method/*!*/ m) { + void MethodDecl(DeclModifierData dmod, bool allowConstructor, bool isWithinAbstractModule, out Method/*!*/ m) { Contract.Ensures(Contract.ValueAtReturn(out m) !=null); IToken/*!*/ id = Token.NoToken; bool hasName = false; IToken keywordToken; @@ -1281,71 +1497,65 @@ IToken signatureEllipsis = null; IToken bodyStart = Token.NoToken; IToken bodyEnd = Token.NoToken; + AllowedDeclModifiers allowed = AllowedDeclModifiers.None; + string caption = ""; - while (!(StartOf(10))) {SynErr(156); Get();} + while (!(StartOf(12))) {SynErr(162); Get();} switch (la.kind) { - case 81: { + case 87: { Get(); + caption = "Methods"; + allowed = AllowedDeclModifiers.Ghost | AllowedDeclModifiers.Static + | AllowedDeclModifiers.Extern; break; } - case 39: { + case 41: { Get(); - isLemma = true; + isLemma = true; caption = "Lemmas"; + allowed = AllowedDeclModifiers.AlreadyGhost | AllowedDeclModifiers.Static + | AllowedDeclModifiers.Protected; break; } - case 82: { + case 88: { Get(); - isCoLemma = true; + isCoLemma = true; caption = "Colemmas"; + allowed = AllowedDeclModifiers.AlreadyGhost | AllowedDeclModifiers.Static + | AllowedDeclModifiers.Protected; break; } - case 83: { + case 89: { Get(); - isCoLemma = true; + isCoLemma = true; caption = "Comethods"; + allowed = AllowedDeclModifiers.AlreadyGhost | AllowedDeclModifiers.Static + | AllowedDeclModifiers.Protected; errors.Warning(t, "the 'comethod' keyword has been deprecated; it has been renamed to 'colemma'"); break; } - case 38: { + case 40: { Get(); - Expect(39); - isIndLemma = true; + Expect(41); + isIndLemma = true; caption = "Inductive lemmas"; + allowed = AllowedDeclModifiers.AlreadyGhost | AllowedDeclModifiers.Static; break; } - case 84: { + case 90: { Get(); if 
(allowConstructor) { isConstructor = true; } else { SemErr(t, "constructors are allowed only in classes"); - } + } + caption = "Constructors"; + allowed = AllowedDeclModifiers.None; break; } - default: SynErr(157); break; + default: SynErr(163); break; } keywordToken = t; - if (isLemma) { - if (mmod.IsGhost) { - SemErr(t, "lemmas cannot be declared 'ghost' (they are automatically 'ghost')"); - } - } else if (isConstructor) { - if (mmod.IsGhost) { - SemErr(t, "constructors cannot be declared 'ghost'"); - } - if (mmod.IsStatic) { - SemErr(t, "constructors cannot be declared 'static'"); - } - } else if (isIndLemma) { - if (mmod.IsGhost) { - SemErr(t, "inductive lemmas cannot be declared 'ghost' (they are automatically 'ghost')"); - } - } else if (isCoLemma) { - if (mmod.IsGhost) { - SemErr(t, "colemmas cannot be declared 'ghost' (they are automatically 'ghost')"); - } - } - - while (la.kind == 44) { + CheckDeclModifiers(dmod, caption, allowed); + while (la.kind == 46) { Attribute(ref attrs); } if (la.kind == 1) { @@ -1358,28 +1568,29 @@ SemErr(la, "a method must be given a name (expecting identifier)"); } } + EncodeExternAsAttribute(dmod, ref attrs, id, /* needAxiom */ true); - if (la.kind == 48 || la.kind == 50) { - if (la.kind == 50) { + if (la.kind == 50 || la.kind == 52) { + if (la.kind == 52) { GenericParameters(typeArgs); } - Formals(true, !mmod.IsGhost, ins); - if (la.kind == 80) { + Formals(true, !dmod.IsGhost, ins); + if (la.kind == 86) { Get(); if (isConstructor) { SemErr(t, "constructors cannot have out-parameters"); } - Formals(false, !mmod.IsGhost, outs); + Formals(false, !dmod.IsGhost, outs); } - } else if (la.kind == 57) { + } else if (la.kind == 59) { Get(); signatureEllipsis = t; - } else SynErr(158); - while (StartOf(11)) { + } else SynErr(164); + while (StartOf(13)) { MethodSpec(req, mod, ens, dec, ref decAttrs, ref modAttrs); } - if (la.kind == 44) { + if (la.kind == 46) { BlockStmt(out body, out bodyStart, out bodyEnd); } - if 
(DafnyOptions.O.DisallowSoundnessCheating && body == null && ens.Count > 0 && !Attributes.Contains(attrs, "axiom") && !Attributes.Contains(attrs, "imported") && !Attributes.Contains(attrs, "decl") && theVerifyThisFile) { + if (!isWithinAbstractModule && DafnyOptions.O.DisallowSoundnessCheating && body == null && ens.Count > 0 && !Attributes.Contains(attrs, "axiom") && !Attributes.Contains(attrs, "imported") && !Attributes.Contains(attrs, "decl") && theVerifyThisFile) { SemErr(t, "a method with an ensures clause must have a body, unless given the :axiom attribute"); } @@ -1388,16 +1599,16 @@ m = new Constructor(tok, hasName ? id.val : "_ctor", typeArgs, ins, req, new Specification(mod, modAttrs), ens, new Specification(dec, decAttrs), body, attrs, signatureEllipsis); } else if (isIndLemma) { - m = new InductiveLemma(tok, id.val, mmod.IsStatic, typeArgs, ins, outs, + m = new InductiveLemma(tok, id.val, dmod.IsStatic, typeArgs, ins, outs, req, new Specification(mod, modAttrs), ens, new Specification(dec, decAttrs), body, attrs, signatureEllipsis); } else if (isCoLemma) { - m = new CoLemma(tok, id.val, mmod.IsStatic, typeArgs, ins, outs, + m = new CoLemma(tok, id.val, dmod.IsStatic, typeArgs, ins, outs, req, new Specification(mod, modAttrs), ens, new Specification(dec, decAttrs), body, attrs, signatureEllipsis); } else if (isLemma) { - m = new Lemma(tok, id.val, mmod.IsStatic, typeArgs, ins, outs, + m = new Lemma(tok, id.val, dmod.IsStatic, typeArgs, ins, outs, req, new Specification(mod, modAttrs), ens, new Specification(dec, decAttrs), body, attrs, signatureEllipsis); } else { - m = new Method(tok, id.val, mmod.IsStatic, mmod.IsGhost, typeArgs, ins, outs, + m = new Method(tok, id.val, dmod.IsStatic, dmod.IsGhost, typeArgs, ins, outs, req, new Specification(mod, modAttrs), ens, new Specification(dec, decAttrs), body, attrs, signatureEllipsis); } m.BodyStartTok = bodyStart; @@ -1411,11 +1622,11 @@ IToken/*!*/ id; List formals = new List(); - while (la.kind == 44) { + 
while (la.kind == 46) { Attribute(ref attrs); } NoUSIdent(out id); - if (la.kind == 48) { + if (la.kind == 50) { FormalsOptionalIds(formals); } ctors.Add(new DatatypeCtor(id, id.val, formals, attrs)); @@ -1423,17 +1634,17 @@ void FormalsOptionalIds(List/*!*/ formals) { Contract.Requires(cce.NonNullElements(formals)); IToken/*!*/ id; Type/*!*/ ty; string/*!*/ name; bool isGhost; - Expect(48); - if (StartOf(12)) { + Expect(50); + if (StartOf(14)) { TypeIdentOptional(out id, out name, out ty, out isGhost); formals.Add(new Formal(id, name, ty, true, isGhost)); - while (la.kind == 21) { + while (la.kind == 22) { Get(); TypeIdentOptional(out id, out name, out ty, out isGhost); formals.Add(new Formal(id, name, ty, true, isGhost)); } } - Expect(49); + Expect(51); } void FIdentType(out IToken/*!*/ id, out Type/*!*/ ty) { @@ -1445,14 +1656,14 @@ } else if (la.kind == 2) { Get(); id = t; - } else SynErr(159); - Expect(20); + } else SynErr(165); + Expect(21); Type(out ty); } void OldSemi() { - if (la.kind == 26) { - while (!(la.kind == 0 || la.kind == 26)) {SynErr(160); Get();} + if (la.kind == 28) { + while (!(la.kind == 0 || la.kind == 28)) {SynErr(166); Get();} Get(); } } @@ -1461,7 +1672,7 @@ Expression e0; IToken endTok; EquivExpression(out e, allowSemi, allowLambda); if (SemiFollowsCall(allowSemi, e)) { - Expect(26); + Expect(28); endTok = t; Expression(out e0, allowSemi, allowLambda); e = new StmtExpr(e.tok, @@ -1475,7 +1686,7 @@ Contract.Ensures(Contract.ValueAtReturn(out id)!=null); Contract.Ensures(Contract.ValueAtReturn(out ty)!=null); isGhost = false; - if (la.kind == 70) { + if (la.kind == 62) { Get(); if (allowGhostKeyword) { isGhost = true; } else { SemErr(t, "formal cannot be declared 'ghost' in this context"); } } @@ -1485,7 +1696,7 @@ void IdentType(out IToken/*!*/ id, out Type/*!*/ ty, bool allowWildcardId) { Contract.Ensures(Contract.ValueAtReturn(out id) != null); Contract.Ensures(Contract.ValueAtReturn(out ty) != null); WildIdent(out id, allowWildcardId); 
- Expect(20); + Expect(21); Type(out ty); } @@ -1501,7 +1712,7 @@ IToken id; Type ty; Type optType = null; WildIdent(out id, true); - if (la.kind == 20) { + if (la.kind == 21) { Get(); Type(out ty); optType = ty; @@ -1514,7 +1725,7 @@ IToken id; Type ty; Type optType = null; WildIdent(out id, true); - if (la.kind == 20) { + if (la.kind == 21) { Get(); Type(out ty); optType = ty; @@ -1527,13 +1738,13 @@ Contract.Ensures(Contract.ValueAtReturn(out ty)!=null); Contract.Ensures(Contract.ValueAtReturn(out identName)!=null); string name = null; id = Token.NoToken; ty = new BoolType()/*dummy*/; isGhost = false; - if (la.kind == 70) { + if (la.kind == 62) { Get(); isGhost = true; } - if (StartOf(3)) { + if (StartOf(5)) { TypeAndToken(out id, out ty); - if (la.kind == 20) { + if (la.kind == 21) { Get(); UserDefinedType udt = ty as UserDefinedType; if (udt != null && udt.TypeArgs.Count == 0) { @@ -1547,9 +1758,9 @@ } else if (la.kind == 2) { Get(); id = t; name = id.val; - Expect(20); + Expect(21); Type(out ty); - } else SynErr(161); + } else SynErr(167); if (name != null) { identName = name; } else { @@ -1597,20 +1808,33 @@ case 13: { Get(); tok = t; gt = new List(); - if (la.kind == 50) { + if (la.kind == 52) { GenericInstantiation(gt); } if (gt.Count > 1) { SemErr("set type expects only one type argument"); } - ty = new SetType(gt.Count == 1 ? gt[0] : null); + ty = new SetType(true, gt.Count == 1 ? gt[0] : null); break; } case 14: { Get(); tok = t; gt = new List(); - if (la.kind == 50) { + if (la.kind == 52) { + GenericInstantiation(gt); + } + if (gt.Count > 1) { + SemErr("set type expects only one type argument"); + } + ty = new SetType(false, gt.Count == 1 ? 
gt[0] : null); + + break; + } + case 15: { + Get(); + tok = t; gt = new List(); + if (la.kind == 52) { GenericInstantiation(gt); } if (gt.Count > 1) { @@ -1620,10 +1844,10 @@ break; } - case 15: { + case 16: { Get(); tok = t; gt = new List(); - if (la.kind == 50) { + if (la.kind == 52) { GenericInstantiation(gt); } if (gt.Count > 1) { @@ -1638,10 +1862,10 @@ tok = t; ty = new UserDefinedType(tok, tok.val, null); break; } - case 16: { + case 17: { Get(); tok = t; gt = new List(); - if (la.kind == 50) { + if (la.kind == 52) { GenericInstantiation(gt); } if (gt.Count == 0) { @@ -1655,10 +1879,10 @@ break; } - case 17: { + case 18: { Get(); tok = t; gt = new List(); - if (la.kind == 50) { + if (la.kind == 52) { GenericInstantiation(gt); } if (gt.Count == 0) { @@ -1675,7 +1899,7 @@ case 5: { Get(); tok = t; gt = null; - if (la.kind == 50) { + if (la.kind == 52) { gt = new List(); GenericInstantiation(gt); } @@ -1684,19 +1908,19 @@ break; } - case 48: { + case 50: { Get(); tok = t; tupleArgTypes = new List(); - if (StartOf(3)) { + if (StartOf(5)) { Type(out ty); tupleArgTypes.Add(ty); - while (la.kind == 21) { + while (la.kind == 22) { Get(); Type(out ty); tupleArgTypes.Add(ty); } } - Expect(49); + Expect(51); if (tupleArgTypes.Count == 1) { // just return the type 'ty' } else { @@ -1711,11 +1935,11 @@ Expression e; tok = t; NameSegmentForTypeName(out e); tok = t; - while (la.kind == 25) { + while (la.kind == 27) { Get(); Expect(1); tok = t; List typeArgs = null; - if (la.kind == 50) { + if (la.kind == 52) { typeArgs = new List(); GenericInstantiation(typeArgs); } @@ -1724,9 +1948,9 @@ ty = new UserDefinedType(e.tok, e); break; } - default: SynErr(162); break; + default: SynErr(168); break; } - if (la.kind == 28) { + if (la.kind == 30) { Type t2; Get(); tok = t; @@ -1744,17 +1968,17 @@ void Formals(bool incoming, bool allowGhostKeyword, List formals) { Contract.Requires(cce.NonNullElements(formals)); IToken id; Type ty; bool isGhost; - Expect(48); - if (la.kind == 1 || 
la.kind == 70) { + Expect(50); + if (la.kind == 1 || la.kind == 62) { GIdentType(allowGhostKeyword, out id, out ty, out isGhost); formals.Add(new Formal(id, id.val, ty, incoming, isGhost)); - while (la.kind == 21) { + while (la.kind == 22) { Get(); GIdentType(allowGhostKeyword, out id, out ty, out isGhost); formals.Add(new Formal(id, id.val, ty, incoming, isGhost)); } } - Expect(49); + Expect(51); } void IteratorSpec(List/*!*/ reads, List/*!*/ mod, List decreases, @@ -1763,45 +1987,45 @@ ref Attributes readsAttrs, ref Attributes modAttrs, ref Attributes decrAttrs) { Expression/*!*/ e; FrameExpression/*!*/ fe; bool isFree = false; bool isYield = false; Attributes ensAttrs = null; - while (!(StartOf(13))) {SynErr(163); Get();} - if (la.kind == 42) { + while (!(StartOf(15))) {SynErr(169); Get();} + if (la.kind == 44) { Get(); while (IsAttribute()) { Attribute(ref readsAttrs); } FrameExpression(out fe, false, false); reads.Add(fe); - while (la.kind == 21) { + while (la.kind == 22) { Get(); FrameExpression(out fe, false, false); reads.Add(fe); } OldSemi(); - } else if (la.kind == 41) { + } else if (la.kind == 43) { Get(); while (IsAttribute()) { Attribute(ref modAttrs); } FrameExpression(out fe, false, false); mod.Add(fe); - while (la.kind == 21) { + while (la.kind == 22) { Get(); FrameExpression(out fe, false, false); mod.Add(fe); } OldSemi(); - } else if (StartOf(14)) { - if (la.kind == 85) { + } else if (StartOf(16)) { + if (la.kind == 91) { Get(); isFree = true; errors.Warning(t, "the 'free' keyword is soon to be deprecated"); } - if (la.kind == 87) { + if (la.kind == 93) { Get(); isYield = true; } - if (la.kind == 43) { + if (la.kind == 45) { Get(); Expression(out e, false, false); OldSemi(); @@ -1811,7 +2035,7 @@ req.Add(new MaybeFreeExpression(e, isFree)); } - } else if (la.kind == 86) { + } else if (la.kind == 92) { Get(); while (IsAttribute()) { Attribute(ref ensAttrs); @@ -1824,27 +2048,27 @@ ens.Add(new MaybeFreeExpression(e, isFree, ensAttrs)); } - } else 
SynErr(164); - } else if (la.kind == 34) { + } else SynErr(170); + } else if (la.kind == 36) { Get(); while (IsAttribute()) { Attribute(ref decrAttrs); } DecreasesList(decreases, false, false); OldSemi(); - } else SynErr(165); + } else SynErr(171); } void BlockStmt(out BlockStmt/*!*/ block, out IToken bodyStart, out IToken bodyEnd) { Contract.Ensures(Contract.ValueAtReturn(out block) != null); List body = new List(); - Expect(44); + Expect(46); bodyStart = t; - while (StartOf(15)) { + while (StartOf(17)) { Stmt(body); } - Expect(45); + Expect(47); bodyEnd = t; block = new BlockStmt(bodyStart, bodyEnd, body); } @@ -1854,33 +2078,33 @@ Contract.Requires(cce.NonNullElements(req)); Contract.Requires(cce.NonNullElements(mod)); Contract.Requires(cce.NonNullElements(ens)); Contract.Requires(cce.NonNullElements(decreases)); Expression/*!*/ e; FrameExpression/*!*/ fe; bool isFree = false; Attributes ensAttrs = null; - while (!(StartOf(16))) {SynErr(166); Get();} - if (la.kind == 41) { + while (!(StartOf(18))) {SynErr(172); Get();} + if (la.kind == 43) { Get(); while (IsAttribute()) { Attribute(ref modAttrs); } FrameExpression(out fe, false, false); mod.Add(fe); - while (la.kind == 21) { + while (la.kind == 22) { Get(); FrameExpression(out fe, false, false); mod.Add(fe); } OldSemi(); - } else if (la.kind == 43 || la.kind == 85 || la.kind == 86) { - if (la.kind == 85) { + } else if (la.kind == 45 || la.kind == 91 || la.kind == 92) { + if (la.kind == 91) { Get(); isFree = true; errors.Warning(t, "the 'free' keyword is soon to be deprecated"); } - if (la.kind == 43) { + if (la.kind == 45) { Get(); Expression(out e, false, false); OldSemi(); req.Add(new MaybeFreeExpression(e, isFree)); - } else if (la.kind == 86) { + } else if (la.kind == 92) { Get(); while (IsAttribute()) { Attribute(ref ensAttrs); @@ -1888,15 +2112,15 @@ Expression(out e, false, false); OldSemi(); ens.Add(new MaybeFreeExpression(e, isFree, ensAttrs)); - } else SynErr(167); - } else if (la.kind == 34) { + } 
else SynErr(173); + } else if (la.kind == 36) { Get(); while (IsAttribute()) { Attribute(ref decAttrs); } DecreasesList(decreases, true, false); OldSemi(); - } else SynErr(168); + } else SynErr(174); } void FrameExpression(out FrameExpression fe, bool allowSemi, bool allowLambda) { @@ -1906,21 +2130,21 @@ string fieldName = null; IToken feTok = null; fe = null; - if (StartOf(7)) { + if (StartOf(9)) { Expression(out e, allowSemi, allowLambda); feTok = e.tok; - if (la.kind == 88) { + if (la.kind == 94) { Get(); Ident(out id); fieldName = id.val; feTok = id; } fe = new FrameExpression(feTok, e, fieldName); - } else if (la.kind == 88) { + } else if (la.kind == 94) { Get(); Ident(out id); fieldName = id.val; fe = new FrameExpression(id, new ImplicitThisExpr(id), fieldName); - } else SynErr(169); + } else SynErr(175); } void DecreasesList(List decreases, bool allowWildcard, bool allowLambda) { @@ -1932,7 +2156,7 @@ decreases.Add(e); } - while (la.kind == 21) { + while (la.kind == 22) { Get(); PossiblyWildExpression(out e, allowLambda); if (!allowWildcard && e is WildcardExpr) { @@ -1946,15 +2170,15 @@ void GenericInstantiation(List/*!*/ gt) { Contract.Requires(cce.NonNullElements(gt)); Type/*!*/ ty; - Expect(50); + Expect(52); Type(out ty); gt.Add(ty); - while (la.kind == 21) { + while (la.kind == 22) { Get(); Type(out ty); gt.Add(ty); } - Expect(51); + Expect(53); } void NameSegmentForTypeName(out Expression e) { @@ -1962,7 +2186,7 @@ List typeArgs = null; Ident(out id); - if (la.kind == 50) { + if (la.kind == 52) { typeArgs = new List(); GenericInstantiation(typeArgs); } @@ -1975,28 +2199,28 @@ Contract.Requires(cce.NonNullElements(reads)); Contract.Requires(decreases == null || cce.NonNullElements(decreases)); Expression/*!*/ e; FrameExpression/*!*/ fe; - while (!(StartOf(17))) {SynErr(170); Get();} - if (la.kind == 43) { + while (!(StartOf(19))) {SynErr(176); Get();} + if (la.kind == 45) { Get(); Expression(out e, false, false); OldSemi(); reqs.Add(e); - } else if 
(la.kind == 42) { + } else if (la.kind == 44) { Get(); PossiblyWildFrameExpression(out fe, false); reads.Add(fe); - while (la.kind == 21) { + while (la.kind == 22) { Get(); PossiblyWildFrameExpression(out fe, false); reads.Add(fe); } OldSemi(); - } else if (la.kind == 86) { + } else if (la.kind == 92) { Get(); Expression(out e, false, false); OldSemi(); ens.Add(e); - } else if (la.kind == 34) { + } else if (la.kind == 36) { Get(); if (decreases == null) { SemErr(t, "'decreases' clauses are meaningless for copredicates, so they are not allowed"); @@ -2005,37 +2229,37 @@ DecreasesList(decreases, false, false); OldSemi(); - } else SynErr(171); + } else SynErr(177); } void FunctionBody(out Expression/*!*/ e, out IToken bodyStart, out IToken bodyEnd) { Contract.Ensures(Contract.ValueAtReturn(out e) != null); e = dummyExpr; - Expect(44); + Expect(46); bodyStart = t; Expression(out e, true, true); - Expect(45); + Expect(47); bodyEnd = t; } void PossiblyWildFrameExpression(out FrameExpression fe, bool allowSemi) { Contract.Ensures(Contract.ValueAtReturn(out fe) != null); fe = dummyFrameExpr; - if (la.kind == 55) { + if (la.kind == 57) { Get(); fe = new FrameExpression(t, new WildcardExpr(t), null); - } else if (StartOf(18)) { + } else if (StartOf(20)) { FrameExpression(out fe, allowSemi, false); - } else SynErr(172); + } else SynErr(178); } void PossiblyWildExpression(out Expression e, bool allowLambda) { Contract.Ensures(Contract.ValueAtReturn(out e)!=null); e = dummyExpr; - if (la.kind == 55) { + if (la.kind == 57) { Get(); e = new WildcardExpr(t); - } else if (StartOf(7)) { + } else if (StartOf(9)) { Expression(out e, false, allowLambda); - } else SynErr(173); + } else SynErr(179); } void Stmt(List/*!*/ ss) { @@ -2052,92 +2276,92 @@ IToken bodyStart, bodyEnd; int breakCount; - while (!(StartOf(19))) {SynErr(174); Get();} + while (!(StartOf(21))) {SynErr(180); Get();} switch (la.kind) { - case 44: { + case 46: { BlockStmt(out bs, out bodyStart, out bodyEnd); s = bs; 
break; } - case 99: { + case 104: { AssertStmt(out s); break; } - case 29: { + case 31: { AssumeStmt(out s); break; } - case 100: { + case 105: { PrintStmt(out s); break; } - case 1: case 2: case 3: case 4: case 8: case 10: case 18: case 19: case 22: case 48: case 129: case 130: case 131: case 132: case 133: case 134: { + case 1: case 2: case 3: case 4: case 8: case 10: case 19: case 20: case 23: case 50: case 133: case 134: case 135: case 136: case 137: case 138: { UpdateStmt(out s); break; } - case 70: case 75: { + case 62: case 81: { VarDeclStatement(out s); break; } - case 96: { + case 101: { IfStmt(out s); break; } - case 97: { + case 102: { WhileStmt(out s); break; } - case 98: { + case 103: { MatchStmt(out s); break; } - case 101: case 102: { + case 106: case 107: { ForallStmt(out s); break; } - case 30: { + case 32: { CalcStmt(out s); break; } - case 103: { + case 108: { ModifyStmt(out s); break; } - case 89: { + case 95: { Get(); x = t; NoUSIdent(out id); - Expect(20); + Expect(21); OneStmt(out s); s.Labels = new LList