a := ft.limits[v.Args[0].ID]
b := ft.limits[v.Args[1].ID]
return ft.newLimit(v, a.sub(b, uint(v.Type.Size())*8))
+ case OpNeg64, OpNeg32, OpNeg16, OpNeg8:
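+ // Negation in two's complement is -a == ^a + 1, so derive the
+ // limits of the negation from the argument's limits via bitwise
+ // complement (com) followed by adding the constant 1.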
+ a := ft.limits[v.Args[0].ID]
+ bitsize := uint(v.Type.Size()) * 8
+ return ft.newLimit(v, a.com(bitsize).add(limit{min: 1, max: 1, umin: 1, umax: 1}, bitsize))
case OpMul64:
a := ft.limits[v.Args[0].ID]
b := ft.limits[v.Args[1].ID]
return ft.newLimit(v, a.mul(b, uint(v.Type.Size())*8))
}
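+// neg64 checks that prove derives unsigned bounds for the negation of a
+// value that is constrained away from zero: with lo > 0, -a cannot wrap
+// through 0, so each comparison below can be proved or disproved.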
+func neg64(a uint64, ensureAllBranchesCouldHappen func() bool) uint64 {
+ var lo, hi uint64 = 0xff, 0xfff
+ a &= hi
+ a |= lo
+
+ z := -a
+
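+ // a is in [lo, hi] and lo > 0, so the unsigned value of z = -a is in
+ // [-hi, -lo].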
+ if ensureAllBranchesCouldHappen() && z > -lo { // ERROR "Disproved Less64U$"
+ return 42
+ }
+ if ensureAllBranchesCouldHappen() && z <= -lo { // ERROR "Proved Leq64U$"
+ return 1337
+ }
+ if ensureAllBranchesCouldHappen() && z < -hi { // ERROR "Disproved Less64U$"
+ return 42
+ }
+ if ensureAllBranchesCouldHappen() && z >= -hi { // ERROR "Proved Leq64U$"
+ return 1337
+ }
+ return z
+}
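+
+// neg64mightOverflowDuringNeg is the counterpart of neg64: lo is 0, so a
+// may be 0 and -a can wrap through zero. The unsigned range of z is not
+// contiguous, so prove cannot eliminate any of the branches below and no
+// ERROR annotations are expected.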
+func neg64mightOverflowDuringNeg(a uint64, ensureAllBranchesCouldHappen func() bool) uint64 {
+ var lo, hi uint64 = 0, 0xfff
+ a &= hi
+ a |= lo
+
+ z := -a
+
+ if ensureAllBranchesCouldHappen() && z > -lo {
+ return 42
+ }
+ if ensureAllBranchesCouldHappen() && z <= -lo {
+ return 1337
+ }
+ if ensureAllBranchesCouldHappen() && z < -hi {
+ return 42
+ }
+ if ensureAllBranchesCouldHappen() && z >= -hi {
+ return 1337
+ }
+ return z
+}
+
//go:noinline
func useInt(a int) {
}