// Variation of the Bellman-Ford algorithm from Dinitz & Itzhak (2017). It interleaves Dijkstra
// with traditional Bellman-Ford, potentially terminating much earlier if the algorithm stabilises.

/****************************************
 * Distance type
 */

adt Distance {
    Inf()
    Fin(dist: Int)
}

// Checks whether c1 < c2, treating Inf as the largest distance
function dist_lt(c1: Distance, c2: Distance): Bool
    decreases
    ensures result ==> c1 != c2
    ensures result ==> !c1.isInf
    ensures !result && c2 == Inf() ==> c1.isInf
{
    c1.isInf ? false : (c2.isInf ? true : c1.dist < c2.dist)
}

define dist_leq(d1, d2) !dist_lt(d2, d1)

// Transitivity of dist_lt and dist_leq, stated as a lemma method
method dist_trans()
    ensures forall d1: Distance, d2: Distance, d3: Distance ::
        dist_lt(d1,d2) && dist_lt(d2,d3) ==> dist_lt(d1,d3)
    ensures forall d1: Distance, d2: Distance, d3: Distance ::
        !dist_lt(d1,d2) && !dist_lt(d2,d3) ==> !dist_lt(d1,d3)
{ }

// Adds two costs
function dist_add(c1: Distance, c2: Distance): Distance
{
    (c1.isInf || c2.isInf) ? Inf() : Fin(c1.dist + c2.dist)
}

// Adds a finite weight to a cost
function dist_plus(c1: Distance, w: Int): Distance
{
    c1.isInf ? Inf() : Fin(c1.dist + w)
}

/****************************************
 * Additional types
 */

// Node fields
field d: Distance  // The total cost from the source to this node
field pi: Ref      // The predecessor node on the best path found so far
field pidx: Int    // The edge taken on the best path

define node_acc(n) acc(n.d) && acc(n.pi) && acc(n.pidx)
define node_read(n) acc(n.d, 1/4) && acc(n.pi, 1/4) && acc(n.pidx, 1/4)

adt Edge {
    E(src: Ref, dst: Ref, wgt: Int)
}

adt Path {
    Empty(end: Ref)
    Step(prv: Ref, idx: Int, tail: Path)
}

define prev(p,g) edges2(g)[p.prv][p.idx]
define prev_wgt(p,g) weights2(g)[p.prv][p.idx]

// Length is the number of edges, not nodes
function length(p: Path): Int
    ensures result >= 0
{
    p.isEmpty ? 0 : 1 + length(p.tail)
}

// Either length(p) < i_n, or length(p) == i_n and the last edge starts from a scanned node
function partial_length2(p: Path, i_n: Int, scanned: Set[Ref], g: Graph): Bool
    requires i_n >= 1
    requires valid_edges(p,g)
{
    length(p) < i_n || (length(p) == i_n && p.prv in scanned)
}

function valid_edges(p: Path, g: Graph): Bool
{
    p.isEmpty
        ? true
        : (p.prv in nodes(g) && 0 <= p.idx < |edges2(g)[p.prv]|
            && valid_edges(p.tail, g))
}

function path_cost(p: Path, g: Graph): Int
{
    (p.isEmpty || p.idx < 0 || !(p.prv in nodes(g)) || p.idx >= |edges2(g)[p.prv]|)
        ? 0
        : prev_wgt(p,g) + path_cost(p.tail, g)
}

function valid_path(p: Path, from: Ref, to: Ref, g: Graph): Bool
{
    p.isEmpty
        ? from == to && from == p.end
        : (p.prv in nodes(g) && 0 <= p.idx < |edges2(g)[p.prv]|
            && edges2(g)[p.prv][p.idx] == to
            && valid_path(p.tail, from, p.prv, g))
}
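
// A small sanity-check sketch (not part of the original development): concrete
// instantiations of the Distance and Path helpers above, assuming the definitions
// verify as stated. The method name is illustrative only and safe to remove.
method example_distance_path_sanity()
{
    // Inf behaves as the top element of the order
    assert dist_lt(Fin(3), Fin(5))
    assert dist_lt(Fin(5), Inf())
    assert !dist_lt(Inf(), Fin(5))
    assert dist_leq(Fin(4), Fin(4))

    // Addition is strict in Inf
    assert dist_add(Fin(2), Fin(3)) == Fin(5)
    assert dist_add(Inf(), Fin(3)) == Inf()
    assert dist_plus(Fin(2), 4) == Fin(6)

    // The empty path has no edges, zero cost, and connects a node to itself in any graph
    var g: Graph
    assert length(Empty(null)) == 0
    assert path_cost(Empty(null), g) == 0
    assert valid_path(Empty(null), null, null, g)
}
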
/****************************************
 * Graph definition
 */

domain Graph {
    function nodes(g: Graph): Seq[Ref]
    function root(g: Graph): Ref

    axiom ax_nodes_not_null {
        forall g: Graph :: { nodes(g) } !(null in nodes(g))
    }
    axiom ax_nodes_no_dup {
        forall g: Graph, i: Int, j: Int :: { nodes(g)[i], nodes(g)[j] }
            0 <= i < j < |nodes(g)| ==> nodes(g)[i] != nodes(g)[j]
    }
    axiom ax_root_in_g {
        forall g: Graph :: { nodes(g) } root(g) in nodes(g)
    }

    function edges2(g: Graph): Map[Ref,Seq[Ref]]
    function weights2(g: Graph): Map[Ref,Seq[Int]]

    axiom ax_edge_weight_keys {
        forall g: Graph, n: Ref ::
            { n in nodes(g), n in weights2(g) }{ n in nodes(g), n in edges2(g) }
            n in nodes(g) == (n in weights2(g) && n in edges2(g))
    }
    axiom ax_edges2_sound {
        forall g: Graph, n1: Ref, n2: Ref ::
            { n1 in edges2(g), n2 in edges2(g)[n1] }{ n1 in edges2(g), n2 in nodes(g) }
            n1 in edges2(g) && n2 in edges2(g)[n1] ==> n2 in nodes(g)
    }
    axiom ax_weights_len {
        forall g: Graph, n: Ref :: { edges2(g)[n] }{ weights2(g)[n] }
            n in edges2(g) ==> |edges2(g)[n]| == |weights2(g)[n]|
    }
    axiom ax_anti_reflexive2 {
        forall g: Graph, n: Ref :: { n in edges2(g)[n] }
            n in edges2(g) ==> !(n in edges2(g)[n])
    }
}

function reachable(n: Ref, g: Graph): Bool
{
    exists p: Path :: valid_path(p, root(g), n, g)
}

function reachable_cost(n: Ref, c: Int, g: Graph): Bool
{
    exists p: Path :: valid_path(p, root(g), n, g) && path_cost(p,g) == c
}

function complement(stack: Seq[Ref], g: Graph): Set[Ref]
    ensures forall n: Ref :: n in result == (n in nodes(g) && !(n in stack))

define graph_acc(g) forall n: Ref :: { n in nodes(g) } n in nodes(g) ==> node_acc(n)
define graph_read(g) forall n: Ref :: { n in nodes(g) } n in nodes(g) ==> node_read(n)

// For an edge: src -> dst, we have dst.d <= src.d + wgt
function relaxed(g: Graph, _src: Ref, idx: Int): Bool
    requires _src in nodes(g) && 0 <= idx < |edges2(g)[_src]|
    requires node_read(_src) && node_read(edges2(g)[_src][idx])
{
    // let _dst == (edges2(g)[_src][idx]) in
    // let _wgt == (weights2(g)[_src][idx]) in
    // dist_leq(_dst.d, dist_plus(_src.d, _wgt))
    dist_leq(edges2(g)[_src][idx].d, dist_plus(_src.d, weights2(g)[_src][idx]))
}

function all_relaxed(g: Graph): Bool
    requires graph_read(g)
{
    forall n: Ref, i: Int :: { relaxed(g,n,i) }
        n in nodes(g) && 0 <= i < |edges2(g)[n]| ==> relaxed(g,n,i)
}

/****************************************
 * Graph Lemmas
 */

// LEMMA: If all edges are relaxed, there cannot be a negative weight loop
method lemma_relaxed_loopfree(g: Graph)
    requires graph_read(g)
    requires all_relaxed(g)
    ensures graph_read(g)
    ensures !neg_loop(g)

// LEMMA: If all edges are relaxed, the loop invariants imply the postconditions
method lemma_relaxed_invs(g: Graph, i_n: Int)
    requires graph_read(g)
    requires 1 <= i_n <= |nodes(g)|
    requires inv_sound(g) && inv_complete(g,i_n,Set()) && inv_optimal_qtf(g,i_n,Set())
    requires all_relaxed(g)
    ensures graph_read(g)
    ensures post_complete(g) && post_optimal(g)

// LEMMA: If distances were still changed in the last round, there must be a negative weight loop
method lemma_changed_to_negloop(g: Graph)
    requires graph_read(g)
    requires inv_sound(g) && inv_complete(g,|nodes(g)|,Set()) && inv_optimal_qtf(g,|nodes(g)|,Set())
    requires true // Something else to indicate a change has been made ???
    ensures graph_read(g)
    ensures neg_loop(g)
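
// LEMMA: A cost witness for u (a valid path of cost u.d.dist) extends along edge idx
// to a cost witness for the edge's target; the body constructs the extended path
// explicitly, obtaining the existential witness via an assume.
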
method lemma_reach_ext(u: Ref, idx: Int, g: Graph)
    requires u in nodes(g) && node_read(u)
    requires reachable_cost(u,u.d.dist,g)
    requires 0 <= idx < |edges2(g)[u]|
    ensures node_read(u)
    ensures reachable_cost(edges2(g)[u][idx], u.d.dist + weights2(g)[u][idx], g)
{
    assert exists p: Path :: valid_path(p,root(g),u,g) && path_cost(p,g) == u.d.dist
    var p1: Path
    assume valid_path(p1,root(g),u,g) && path_cost(p1,g) == u.d.dist
    var p2: Path := Step(u,idx,p1)
    var v: Ref := edges2(g)[u][idx]
    assert path_cost(p2,g) == u.d.dist + weights2(g)[u][idx]
}

/****************************************
 * Invariants, Postconditions
 */

// Soundness:
define inv_sound_body(n,g) (n in nodes(g) && n.d.isFin) ==> reachable_cost(n, n.d.dist, g)

function inv_sound(g: Graph): Bool
    requires graph_read(g)
{
    forall n: Ref :: { n in nodes(g), reachable_cost(n, n.d.dist, g) }
        inv_sound_body(n, g)
}

// Completeness:
function inv_complete(g: Graph, i_n: Int, scanned: Set[Ref]): Bool
    requires graph_read(g) && i_n >= 1
{
    forall n: Ref :: { n in nodes(g), n.d }
        n in nodes(g)
        && (exists p: Path :: valid_edges(p, g) && valid_path(p, root(g), n, g)
            && partial_length2(p, i_n, scanned, g))
        ==> n.d.isFin
}

// Optimality:
function inv_optimal(g: Graph, i_n: Int, scanned: Set[Ref]): Bool
    requires graph_read(g) && i_n >= 1
{
    forall n: Ref, p: Path :: { n in nodes(g), valid_path(p,root(g),n,g), n.d, path_cost(p,g) }
        (n in nodes(g) && n.d.isFin && valid_edges(p, g)
            && valid_path(p, root(g), n, g) && partial_length2(p, i_n, scanned,g))
        ==> dist_leq(n.d, Fin(path_cost(p,g)))
}

function inv_optimal_body(g: Graph, i_n: Int, scanned: Set[Ref], n: Ref, p: Path): Bool
    requires graph_read(g) && i_n >= 1
{
    (n in nodes(g) && n.d.isFin && valid_edges(p, g)
        && valid_path(p, root(g), n, g) && partial_length2(p, i_n, scanned, g))
    ==> dist_leq(n.d, Fin(path_cost(p, g)))
}

@opaque()
function inv_optimal_qtf(g: Graph, i_n: Int, scanned: Set[Ref]): Bool
    requires graph_read(g) && i_n >= 1
{
    forall n: Ref, p: Path :: { n in nodes(g), valid_path(p, root(g), n, g), n.d }
        inv_optimal_body(g, i_n, scanned, n, p)
}

method instantiate_inv_optimal(g: Graph, i_n: Int, scanned: Set[Ref], n: Ref, p: Path)
    requires graph_read(g) && i_n >= 1
    requires inv_optimal_qtf(g,i_n,scanned)
    ensures graph_read(g)
    ensures inv_optimal_body(g,i_n,scanned,n,p)

function inv_par_optimal_body(g: Graph, min: Ref, i_n: Int, i_e: Int, p: Path, i: Int): Bool
    requires graph_read(g) && min in nodes(g)
    requires i_n >= 1 && 0 <= i_e <= |edges2(g)[min]|
{
    (valid_edges(p,g) && length(p) == i_n-1
        && valid_path(p,root(g),min,g) && 0 <= i < i_e)
    ==> !dist_lt(Fin(path_cost(Step(min,i,p),g)),edges2(g)[min][i].d)
}

function inv_par_optimal(g: Graph, min: Ref, i_n: Int, i_e: Int): Bool
    requires graph_read(g) && min in nodes(g)
    requires i_n >= 1 && 0 <= i_e <= |edges2(g)[min]|

method instantiate_inv_par_optimal(g: Graph, min: Ref, i_n: Int, i_e: Int, p: Path, i: Int)
    requires graph_read(g) && min in nodes(g)
    requires i_n >= 1 && 0 <= i_e <= |edges2(g)[min]|
    requires inv_par_optimal(g, min, i_n, i_e)
    ensures graph_read(g)
    ensures inv_par_optimal_body(g, min, i_n, i_e, p, i)
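
// NOTE: inv_optimal_qtf is marked @opaque() and inv_par_optimal is deliberately left
// without a body, so their quantified content is not available to the verifier
// automatically; where the proofs below need a particular instance of the underlying
// property, they call the corresponding instantiate_* method.
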
// Pi is either null or a valid node with a valid edge idx
function inv_pi_valid(g: Graph): Bool
    requires graph_read(g)
{
    (forall n: Ref :: { n in nodes(g), n.pi }
        n in nodes(g) ==> n.pi == null || (n.pi in nodes(g) && 0 <= n.pidx < |edges2(g)[n.pi]|))
    && (forall n: Ref :: { n in nodes(g), n.pi }
        n in nodes(g) && n != root(g) ==> (n.pi == null) == (n.d.isInf))
    && ((root(g).d == Fin(0)) == (root(g).pi == null))
}

// The pi edge is consistent with the distances: n.pi.d + edge weight <= n.d
function inv_pi_correct(g: Graph): Bool
    requires graph_read(g) && inv_pi_valid(g)
{
    forall n: Ref :: { n in nodes(g), n.pi, edges2(g)[n.pi], weights2(g)[n.pi][n.pidx] }
        n in nodes(g) && n.pi != null
        ==> dist_leq(dist_plus(n.pi.d, weights2(g)[n.pi][n.pidx]), n.d)
}

// Post Conditions
function post_complete(g: Graph): Bool
    requires graph_read(g)
{
    forall n: Ref :: { n in nodes(g) }
        n in nodes(g) && (exists p: Path :: valid_path(p, root(g), n, g)) ==> n.d.isFin
}

function post_optimal(g: Graph): Bool
    requires graph_read(g)
{
    forall n: Ref, p: Path :: { n in nodes(g), valid_path(p,root(g),n,g) }
        n in nodes(g) && valid_path(p,root(g),n,g) ==> !dist_lt(Fin(path_cost(p,g)),n.d)
}

define neg_loop(g) exists n: Ref, loop: Path, reach: Path ::
    valid_path(reach, root(g), n, g) && valid_path(loop, n, n, g) && path_cost(loop,g) < 0

@opaque()
function post_condition(g: Graph, solvable: Bool): Bool
    requires graph_read(g)
{
    neg_loop(g) ? !solvable : (solvable && inv_sound(g) && post_complete(g) && post_optimal(g))
}

/****************************************
 * Bellman-Ford algorithm
 */

function dummy(b: Bool): Bool { true } // To test Alex's hypothesis about terms in assertions

define initialised(g) forall n: Ref :: { n in nodes(g) }
    n in nodes(g) ==> n.pi == null && (n == root(g) ? n.d == Fin(0) : n.d.isInf)

method bellmanFord(g: Graph) returns (solvable: Bool)
    requires graph_acc(g) && initialised(g)
    ensures graph_acc(g)
    ensures post_condition(g, solvable)
{
    //dist_trans()
    var node_i: Int := 1
    solvable := false

    assert valid_path(Empty(root(g)), root(g), root(g), g) // Needed for soundness

    // Not all of these are needed:
    assert inv_sound(g)
    assert inv_complete(g,node_i,Set()) // verifies in Carbon but not in Silicon

    // Establish inv_optimal going into the loop
    var nd_branch_1: Bool
    if (nd_branch_1) {
        var n: Ref
        var p: Path
        assert inv_optimal_body(g, node_i, Set(), n, p)
        assume false
    }
    assume inv_optimal_qtf(g, node_i, Set())

    while (node_i < |nodes(g)| && !solvable)
        decreases |nodes(g)| - node_i
        invariant graph_acc(g)
        invariant 0 < node_i && node_i <= |nodes(g)|
        invariant inv_sound(g)
        invariant inv_complete(g,node_i,Set())
        invariant inv_optimal_qtf(g,node_i,Set())
        //invariant inv_pi_valid(g) //&& inv_pi_correct(g)
        invariant solvable ==> all_relaxed(g)
    {
        var stack: Seq[Ref] := nodes(g)
        var changed: Bool := false
        var continue: Bool := true

        // Use inv_optimal_qtf(g,node_i,Set()) from first loop
        // to get inv_optimal_qtf(g, node_i, complement(stack,g)) for next loop
        var nd_branch_2: Bool
        if (nd_branch_2) {
            var n: Ref
            var p: Path
            instantiate_inv_optimal(g, node_i, Set(), n, p)
            assert inv_optimal_body(g, node_i, complement(stack, g), n, p)
            assume false
        }
        assume inv_optimal_qtf(g, node_i, complement(stack,g))
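
        // Dijkstra-like scan: repeatedly pick the unscanned node with minimum d,
        // relax all of its outgoing edges, and remove it from the stack; stop once
        // the stack is empty or every remaining node still has d == Inf().
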
        label dijk_scan
        while (continue)
            invariant graph_acc(g)
            invariant forall i: Int :: { stack[i] } 0 <= i < |stack| ==> stack[i] in nodes(g)
            invariant forall i: Int, j: Int :: { stack[i], stack[j] }
                0 <= i < j < |stack| ==> stack[i] != stack[j]
            invariant continue ==> |stack| > 0
            invariant !continue ==> (forall i: Int :: 0 <= i < |stack| ==> stack[i].d.isInf)
            invariant inv_sound(g)
            invariant inv_complete(g, node_i, complement(stack, g))
            invariant inv_optimal_qtf(g, node_i, complement(stack, g))
            //invariant inv_pi_valid(g) //&& inv_pi_correct(g)
            // All n.d are (non-strictly) decreasing
            invariant forall n: Ref :: n in nodes(g) ==> !dist_lt(old[dijk_scan](n.d), n.d)
            invariant !changed == (forall n: Ref :: { n.d } n in nodes(g) ==> n.d == old[dijk_scan](n.d))
            invariant !changed ==> (forall n: Ref, i: Int :: { relaxed(g, n, i) }
                n in complement(stack, g) && 0 <= i < |edges2(g)[n]| ==> relaxed(g, n, i))
            invariant solvable ==> all_relaxed(g)
        {
            //------------------
            // Find min-d node:
            var min_idx: Int := 0
            var min: Ref := stack[min_idx]
            var stack_i: Int := 1

            label min_loop
            while (stack_i < |stack|)
                invariant graph_read(g)
                invariant 0 <= stack_i <= |stack|
                invariant stack == old[min_loop](stack)
                invariant 0 <= min_idx < |stack|
                invariant min == stack[min_idx]
                // min is indeed the minimum so far
                invariant forall i: Int :: 0 <= i < stack_i ==> dist_leq(min.d, stack[i].d)
            {
                if (dist_lt(stack[stack_i].d,min.d)) {
                    // Transitivity of dist_leq for the new minimum (cf. dist_trans)
                    assume forall d1: Distance ::
                        dist_leq(stack[stack_i].d, min.d) && dist_leq(min.d, d1)
                        ==> dist_leq(stack[stack_i].d, d1)
                    min_idx := stack_i
                    min := stack[stack_i]
                }
                stack_i := stack_i + 1
            }

            //------------------------------------------
            // If min exists, relax all outgoing edges, else break:
            if (min.d.isInf) {
                // All nodes in the stack are inf
                assert forall i: Int :: 0 <= i < |stack|
                    ==> dist_leq(min.d,stack[i].d) && stack[i].d.isInf
                // assert dummy(forall i: Int :: 0 <= i < |stack|
                //     ==> dist_leq(min.d,stack[i].d) && stack[i].d.isInf)
                continue := false
            } else {
                // Do all the relaxing
                var edge_i: Int := 0
                assume inv_par_optimal(g, min, node_i, edge_i) // Was proven before, can do manually
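
                // Relaxation loop: for every outgoing edge of min, lower the target's
                // d (and set its pi/pidx) whenever the path through min is cheaper.
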
                label edge_scan
                while (edge_i < |edges2(g)[min]|)
                    invariant 0 <= edge_i <= |edges2(g)[min]|
                    invariant graph_acc(g)
                    invariant inv_sound(g) //&& inv_pi_valid(g) //&& inv_pi_correct(g)
                    invariant inv_optimal_qtf(g, node_i, complement(stack,g))
                    invariant min == stack[min_idx] && min.d == old[edge_scan](min.d)
                    invariant forall i: Int :: 0 <= i < edge_i ==> relaxed(g, min,i)
                    // All n.d are (non-strictly) decreasing
                    invariant forall n: Ref :: n in nodes(g) ==> !dist_lt(old[edge_scan](n.d), n.d)
                    // Changed flag is correct (maybe not needed)
                    invariant !changed == (forall n: Ref :: n in nodes(g) ==> n.d == old[dijk_scan](n.d))
                    // Unchanged means all edges are relaxed
                    invariant !changed ==> (forall n: Ref, i: Int ::
                        n in complement(stack, g) && 0 <= i < |edges2(g)[n]| ==> relaxed(g,n,i))
                    invariant !changed ==> (forall i: Int :: 0 <= i < edge_i ==> relaxed(g, min, i))
                    // Helps prove inv_complete
                    invariant forall i: Int :: 0 <= i < edge_i ==> edges2(g)[min][i].d.isFin
                    // Helps prove inv_optimal
                    invariant forall i: Int :: 0 <= i < edge_i
                        ==> dist_leq(edges2(g)[min][i].d, dist_plus(min.d, weights2(g)[min][i]))
                    invariant inv_par_optimal(g, min, node_i, edge_i)
                        // forall p: Path, i: Int ::
                        //     { valid_path(p,root(g),min,g), edges2(g)[min][i] }
                        //     (valid_edges(p,g) && length(p) == node_i-1
                        //         && valid_path(p,root(g),min,g) && 0 <= i < edge_i)
                        //     ==> !dist_lt(Fin(path_cost(Step(min,i,p),g)),edges2(g)[min][i].d)
                {
                    // This seems weirdly necessary to unfold the def of relaxed...
                    assert forall _src: Ref, i: Int ::
                        _src in nodes(g) && 0 <= i < |edges2(g)[_src]|
                        ==> (relaxed(g,_src,i)
                            == dist_leq(edges2(g)[_src][i].d, dist_plus(_src.d, weights2(g)[_src][i])))

                    // Relax
                    var dst: Ref := edges2(g)[min][edge_i]
                    var wgt: Int := weights2(g)[min][edge_i]
                    if (dist_lt(dist_plus(min.d, wgt), dst.d)) {
                        dst.d := dist_plus(min.d, wgt)
                        dst.pi := min
                        dst.pidx := edge_i
                        changed := true
                        lemma_reach_ext(min, edge_i, g) // required to maintain invariants
                    }
                    edge_i := edge_i + 1

                    assume inv_par_optimal(g, min, node_i, edge_i)
                    //assume inv_pi_correct(g) // TODO: FIX
                    assume inv_optimal_qtf(g, node_i, complement(stack,g))
                }

                // Remove min element
                var prev_stack: Seq[Ref] := stack
                stack := stack[0..min_idx] ++ stack[min_idx+1..]
                if (|stack| == 0) {
                    continue := false
                }

                // Establish inv_optimal
                assert inv_par_optimal(g, min, node_i, edge_i)
                assert inv_optimal_qtf(g, node_i, complement(prev_stack, g))
                var nd_branch_3: Bool
                if (nd_branch_3) {
                    var n: Ref
                    var p: Path
                    if (length(p) < node_i) {
                        instantiate_inv_optimal(g, node_i, complement(prev_stack, g), n, p)
                    } else {
                        instantiate_inv_optimal(g, node_i, complement(prev_stack, g), n, p)
                        instantiate_inv_optimal(g, node_i, complement(prev_stack, g), p.prv, p.tail)
                        instantiate_inv_par_optimal(g, min, node_i, edge_i, p.tail, p.idx)
                    }
                    assert inv_optimal_body(g, node_i, complement(stack, g), n, p)
                    assume false
                }
                assume inv_optimal_qtf(g, node_i, complement(stack, g))
            }
        }

        node_i := node_i + 1
        if (!changed) {
            // No more changes will occur
            solvable := true
            // No negative loops
            assert all_relaxed(g)
        }

        var nd_branch_4: Bool
        if (nd_branch_4) {
            var n: Ref
            var p: Path
            instantiate_inv_optimal(g, node_i-1, complement(stack, g), n, p)
            assert inv_optimal_body(g, node_i, Set(), n, p)
            assume false
        }
        assume inv_optimal_qtf(g, node_i, Set())
    } // End main while

    if (solvable) {
        lemma_relaxed_loopfree(g)
        lemma_relaxed_invs(g, node_i)
        assert @reveal()post_condition(g, solvable)
    } else {
        assert node_i == |nodes(g)|
        lemma_changed_to_negloop(g)
        assert @reveal()post_condition(g, solvable)
    }
}
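
// A possible client sketch (not part of the original development): shows how a caller
// could invoke bellmanFord; the meaning of the result is given by the opaque
// post_condition above. The method name is illustrative only.
method client_sketch(g: Graph)
    requires graph_acc(g) && initialised(g)
    ensures graph_acc(g)
{
    var ok: Bool
    ok := bellmanFord(g)
    // post_condition(g, ok) now holds: ok is false exactly when g has a reachable
    // negative-weight loop; otherwise the d fields are sound, complete, and optimal.
}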