diff --git a/src/main/scala/interfaces/state/Chunks.scala b/src/main/scala/interfaces/state/Chunks.scala index 201c76eeb..641188f15 100644 --- a/src/main/scala/interfaces/state/Chunks.scala +++ b/src/main/scala/interfaces/state/Chunks.scala @@ -17,13 +17,19 @@ trait GeneralChunk extends Chunk { val resourceID: ResourceID val id: ChunkIdentifer val perm: Term - def withPerm(perm: Term): GeneralChunk + def applyCondition(newCond: Term): GeneralChunk + def permMinus(perm: Term): GeneralChunk + def permPlus(perm: Term): GeneralChunk } trait NonQuantifiedChunk extends GeneralChunk { val args: Seq[Term] val snap: Term - override def withPerm(perm: Term): NonQuantifiedChunk + + override def applyCondition(newCond: Term): NonQuantifiedChunk + override def permMinus(perm: Term): NonQuantifiedChunk + override def permPlus(perm: Term): NonQuantifiedChunk + def withPerm(perm: Term): NonQuantifiedChunk def withSnap(snap: Term): NonQuantifiedChunk } @@ -31,6 +37,9 @@ trait QuantifiedChunk extends GeneralChunk { val quantifiedVars: Seq[Var] def snapshotMap: Term def valueAt(arguments: Seq[Term]): Term - override def withPerm(perm: Term): QuantifiedChunk + + override def applyCondition(newCond: Term): QuantifiedChunk + override def permMinus(perm: Term): QuantifiedChunk + override def permPlus(perm: Term): QuantifiedChunk def withSnapshotMap(snap: Term): QuantifiedChunk } \ No newline at end of file diff --git a/src/main/scala/logger/records/data/SingleMergeRecord.scala b/src/main/scala/logger/records/data/SingleMergeRecord.scala index 7137aff56..04dc530ad 100644 --- a/src/main/scala/logger/records/data/SingleMergeRecord.scala +++ b/src/main/scala/logger/records/data/SingleMergeRecord.scala @@ -8,13 +8,13 @@ package viper.silicon.logger.records.data import viper.silicon.common.collections.immutable.InsertionOrderedSet import viper.silicon.decider.PathConditionStack -import viper.silicon.interfaces.state.NonQuantifiedChunk +import viper.silicon.interfaces.state.Chunk import viper.silicon.state._ import viper.silicon.state.terms.Term import viper.silver.ast -class SingleMergeRecord(val destChunks: Seq[NonQuantifiedChunk], - val newChunks: Seq[NonQuantifiedChunk], +class SingleMergeRecord(val destChunks: Seq[Chunk], + val newChunks: Seq[Chunk], p: PathConditionStack) extends DataRecord { val value: ast.Node = null val state: State = null diff --git a/src/main/scala/logger/writer/SymbExLogReportWriter.scala b/src/main/scala/logger/writer/SymbExLogReportWriter.scala index 8b3d9928f..bff6e301b 100644 --- a/src/main/scala/logger/writer/SymbExLogReportWriter.scala +++ b/src/main/scala/logger/writer/SymbExLogReportWriter.scala @@ -58,32 +58,32 @@ object SymbExLogReportWriter { "perm" -> TermWriter.toJSON(perm) ) - case QuantifiedFieldChunk(id, fvf, perm, invs, cond, receiver, hints) => + case QuantifiedFieldChunk(id, fvf, condition, perm, invs, receiver, hints) => JsObject( "type" -> JsString("quantified_field_chunk"), "field" -> JsString(id.toString), "field_value_function" -> TermWriter.toJSON(fvf), + "condition" -> TermWriter.toJSON(condition), "perm" -> TermWriter.toJSON(perm), "invs" -> invs.map(inverseFunctionsToJSON).getOrElse(JsNull), - "cond" -> cond.map(TermWriter.toJSON).getOrElse(JsNull), "receiver" -> receiver.map(TermWriter.toJSON).getOrElse(JsNull), "hints" -> (if (hints.nonEmpty) JsArray(hints.map(TermWriter.toJSON).toVector) else JsNull) ) - case QuantifiedPredicateChunk(id, vars, psf, perm, invs, cond, singletonArgs, hints) => + case QuantifiedPredicateChunk(id, vars, psf, condition, perm, invs, 
singletonArgs, hints) => JsObject( "type" -> JsString("quantified_predicate_chunk"), "vars" -> JsArray(vars.map(TermWriter.toJSON).toVector), "predicate" -> JsString(id.toString), "predicate_snap_function" -> TermWriter.toJSON(psf), + "condition" -> TermWriter.toJSON(condition), "perm" -> TermWriter.toJSON(perm), "invs" -> invs.map(inverseFunctionsToJSON).getOrElse(JsNull), - "cond" -> cond.map(TermWriter.toJSON).getOrElse(JsNull), "singleton_args" -> singletonArgs.map(as => JsArray(as.map(TermWriter.toJSON).toVector)).getOrElse(JsNull), "hints" -> (if (hints.nonEmpty) JsArray(hints.map(TermWriter.toJSON).toVector) else JsNull) ) - case QuantifiedMagicWandChunk(id, vars, wsf, perm, invs, cond, singletonArgs, hints) => + case QuantifiedMagicWandChunk(id, vars, wsf, perm, invs, singletonArgs, hints) => JsObject( "type" -> JsString("quantified_magic_wand_chunk"), "vars" -> JsArray(vars.map(TermWriter.toJSON).toVector), @@ -91,7 +91,6 @@ object SymbExLogReportWriter { "wand_snap_function" -> TermWriter.toJSON(wsf), "perm" -> TermWriter.toJSON(perm), "invs" -> invs.map(inverseFunctionsToJSON).getOrElse(JsNull), - "cond" -> cond.map(TermWriter.toJSON).getOrElse(JsNull), "singleton_args" -> singletonArgs.map(as => JsArray(as.map(TermWriter.toJSON).toVector)).getOrElse(JsNull), "hints" -> (if (hints.nonEmpty) JsArray(hints.map(TermWriter.toJSON).toVector) else JsNull) ) diff --git a/src/main/scala/rules/Executor.scala b/src/main/scala/rules/Executor.scala index c01e72515..92be2f1c1 100644 --- a/src/main/scala/rules/Executor.scala +++ b/src/main/scala/rules/Executor.scala @@ -478,7 +478,8 @@ object executor extends ExecutionRules { val ch = quantifiedChunkSupporter.createSingletonQuantifiedChunk(Seq(`?r`), field, Seq(tRcvr), FullPerm, sm, s.program) if (s3.heapDependentTriggers.contains(field)) v3.decider.assume(FieldTrigger(field.name, sm, tRcvr)) - Q(s3.copy(h = h3 + ch), v3) + val (fr4, h4) = v3.stateConsolidator(s3).merge(s3.functionRecorder, h3, ch, v3) + Q(s3.copy(h = h4, functionRecorder = fr4), v3) } ) })) diff --git a/src/main/scala/rules/QuantifiedChunkSupport.scala b/src/main/scala/rules/QuantifiedChunkSupport.scala index f87812d2b..28d67cc3c 100644 --- a/src/main/scala/rules/QuantifiedChunkSupport.scala +++ b/src/main/scala/rules/QuantifiedChunkSupport.scala @@ -22,7 +22,7 @@ import viper.silicon.state.terms.perms.IsPositive import viper.silicon.state.terms.perms.BigPermSum import viper.silicon.state.terms.predef.`?r` import viper.silicon.state.terms.utils.consumeExactRead -import viper.silicon.supporters.functions.NoopFunctionRecorder +import viper.silicon.supporters.functions.{FunctionRecorder, NoopFunctionRecorder} import viper.silicon.utils.notNothing.NotNothing import viper.silicon.verifier.Verifier import viper.silver.reporter.InternalWarningMessage @@ -205,6 +205,35 @@ trait QuantifiedChunkSupport extends SymbolicExecutionRules { def hintBasedChunkOrderHeuristic(hints: Seq[Term]) : Seq[QuantifiedBasicChunk] => Seq[QuantifiedBasicChunk] + + def findChunk(chunks: Iterable[Chunk], chunk: QuantifiedChunk, v: Verifier): Option[QuantifiedChunk] + + /** Merge the snapshots of two quantified heap chunks that denote the same field locations + * + * @param fr The functionRecorder to use when new snapshot maps are generated. + * @param field The name of the field. + * @param t1 The first chunk's snapshot map. + * @param t2 The second chunk's snapshot map. + * @param p1 The first chunk's permission amount, should be constrained by the domain. 
+ * @param p2 The second chunk's permission amount, should be constrained by the domain. + * @param v The verifier to use. + * @return A tuple (fr, sm, def) of functionRecorder, a snapshot map sm and a term def constraining sm. + */ + def combineFieldSnapshotMaps(fr: FunctionRecorder, field: String, t1: Term, t2: Term, p1: Term, p2: Term, v: Verifier): (FunctionRecorder, Term, Term) + + /** Merge the snapshots of two quantified heap chunks that denote the same predicate + * + * @param fr The functionRecorder to use when new snapshot maps are generated. + * @param predicate The name of the predicate. + * @param qVars The variables over which p1 and p2 are defined + * @param t1 The first chunk's snapshot map. + * @param t2 The second chunk's snapshot map. + * @param p1 The first chunk's permission amount, should be constrained by the domain. + * @param p2 The second chunk's permission amount, should be constrained by the domain. + * @param v The verifier to use. + * @return A tuple (fr, sm, def) of functionRecorder, a snapshot map sm and a term def constraining sm. + */ + def combinePredicateSnapshotMaps(fr: FunctionRecorder, predicate: String, qVars: Seq[Var], t1: Term, t2: Term, p1: Term, p2: Term, v: Verifier): (FunctionRecorder, Term, Term) } object quantifiedChunkSupporter extends QuantifiedChunkSupport { @@ -225,8 +254,6 @@ object quantifiedChunkSupporter extends QuantifiedChunkSupport { .zip(arguments) .map { case (x, a) => x === a }) - val conditionalizedPermissions = - Ite(condition, permissions, NoPerm) val hints = extractHints(None, arguments) @@ -235,9 +262,9 @@ object quantifiedChunkSupporter extends QuantifiedChunkSupport { resource, arguments, sm, - conditionalizedPermissions, + condition, + permissions, None, - Some(conditionalizedPermissions), Some(arguments), hints, program) @@ -271,11 +298,8 @@ object quantifiedChunkSupporter extends QuantifiedChunkSupport { val qvarsToInversesOfCodomain = inverseFunctions.qvarsToInversesOf(codomainQVars) - val conditionalizedPermissions = - Ite( - And(And(imagesOfCodomain), condition.replace(qvarsToInversesOfCodomain)), - permissions.replace(qvarsToInversesOfCodomain), - NoPerm) + val cond = And(And(imagesOfCodomain), condition.replace(qvarsToInversesOfCodomain)) + val perms = permissions.replace(qvarsToInversesOfCodomain) val hints = extractHints(Some(condition), arguments) @@ -285,9 +309,9 @@ object quantifiedChunkSupporter extends QuantifiedChunkSupport { resource, arguments, sm, - conditionalizedPermissions, + cond, + perms, Some(inverseFunctions), - Some(conditionalizedPermissions), None, hints, program) @@ -323,9 +347,9 @@ object quantifiedChunkSupporter extends QuantifiedChunkSupport { resource: ast.Resource, arguments: Seq[Term], sm: Term, + condition: Term, permissions: Term, optInverseFunctions: Option[InverseFunctions], - optInitialCond: Option[Term], optSingletonArguments: Option[Seq[Term]], hints: Seq[Term], program: ast.Program) @@ -339,9 +363,9 @@ object quantifiedChunkSupporter extends QuantifiedChunkSupport { QuantifiedFieldChunk( BasicChunkIdentifier(field.name), sm, + condition, permissions, optInverseFunctions, - optInitialCond, optSingletonArguments.map(_.head), hints) @@ -350,20 +374,20 @@ object quantifiedChunkSupporter extends QuantifiedChunkSupport { BasicChunkIdentifier(predicate.name), codomainQVars, sm, + condition, permissions, optInverseFunctions, - optInitialCond, optSingletonArguments, hints) case wand: ast.MagicWand => + val conditionalizedPermissions = Ite(condition, permissions, NoPerm) 
QuantifiedMagicWandChunk( MagicWandIdentifier(wand, program), codomainQVars, sm, - permissions, + conditionalizedPermissions, optInverseFunctions, - optInitialCond, optSingletonArguments, hints) @@ -1055,8 +1079,8 @@ object quantifiedChunkSupporter extends QuantifiedChunkSupport { val forall = ast.Forall(Seq(), Seq(), ast.TrueLit()())() // Just used to get the position for a potentioal warning. produce(s2, forall, resource, qvars, formalQVars, qid, optTrigger, tTriggers, auxGlobals, auxNonGlobals, tCond, tArgs, freshSnap, permissionToProduce, pve, negativePermissionReason, notInjectiveReason, v)( (s3, ch, v3) => { - val h3 = h + ch - doConsume(s3, h3, resource, qvars, formalQVars, qid, optTrigger, tTriggers, auxGlobals, auxNonGlobals, tCond, tArgs, tPerm, pve, negativePermissionReason, notInjectiveReason, insufficientPermissionReason, v3)(Q) + val (fr3, h3) = v3.stateConsolidator(s3).merge(s3.functionRecorder, h, ch, v3) + doConsume(s3.copy(functionRecorder = fr3), h3, resource, qvars, formalQVars, qid, optTrigger, tTriggers, auxGlobals, auxNonGlobals, tCond, tArgs, tPerm, pve, negativePermissionReason, notInjectiveReason, insufficientPermissionReason, v3)(Q) } ) } @@ -1323,7 +1347,7 @@ object quantifiedChunkSupporter extends QuantifiedChunkSupport { //val newTopHeap = hs1.head + cHeap1 val totalConsumedAmount = cHeap1.values.foldLeft(NoPerm: Term)((q, ch) => PermPlus(q, ch.asInstanceOf[GeneralChunk].perm)) val totalConsumedFromFirst = if (optChunks.length > 0 && optChunks.head.nonEmpty) { - PermMin(optChunks.head.get.asInstanceOf[QuantifiedBasicChunk].perm, totalConsumedAmount) + PermMin(optChunks.head.get.perm, totalConsumedAmount) } else { NoPerm } @@ -1331,7 +1355,10 @@ object quantifiedChunkSupporter extends QuantifiedChunkSupport { val nonEmptyChunks = optChunks.filter(_.isDefined) - val cHeap2 = if (nonEmptyChunks.isEmpty) Heap() else Heap(Seq(nonEmptyChunks.head.get.asInstanceOf[QuantifiedBasicChunk].withPerm(totalConsumedFromAllButFirst))) + val cHeap2 = if (nonEmptyChunks.isEmpty) + Heap() + else + Heap(Seq(nonEmptyChunks.head.get.withPerm(totalConsumedFromAllButFirst))) val (fr2, newTopHeap2) = if (nonEmptyChunks.isEmpty) (s1.functionRecorder, s.h) else @@ -1366,8 +1393,8 @@ object quantifiedChunkSupporter extends QuantifiedChunkSupport { } val freshSnap = v.decider.fresh(snapSort) produceSingleLocation(s2, resource, codomainQVars, arguments, freshSnap, permissionToProduce, trigger, v)((s3, ch, v3) => { - val h3 = h + ch - doConsumeSingleLocation(s3, h3, codomainQVars, arguments, resourceAccess, permissions, optChunkOrderHeuristic, pve, v3)(Q) + val (fr4, h4) = v3.stateConsolidator(s3).merge(s3.functionRecorder, h, ch, v3) + doConsumeSingleLocation(s3.copy(functionRecorder = fr4), h4, codomainQVars, arguments, resourceAccess, permissions, optChunkOrderHeuristic, pve, v3)(Q) }) } } else { @@ -1627,7 +1654,7 @@ object quantifiedChunkSupporter extends QuantifiedChunkSupport { v.decider.assume(permissionConstraint) remainingChunks = - remainingChunks :+ ithChunk.withPerm(PermMinus(ithChunk.perm, ithPTaken)) + remainingChunks :+ ithChunk.permMinus(ithPTaken) consumedChunks = consumedChunks :+ ithChunk.withPerm(ithPTaken) } else { v.decider.prover.comment(s"Chunk depleted?") @@ -1640,7 +1667,7 @@ object quantifiedChunkSupporter extends QuantifiedChunkSupport { consumedChunks = consumedChunks :+ ithChunk.withPerm(ithPTaken) if (!chunkDepleted) { remainingChunks = - remainingChunks :+ ithChunk.withPerm(PermMinus(ithChunk.perm, ithPTaken)) + remainingChunks :+ 
ithChunk.permMinus(ithPTaken) } } } @@ -1963,6 +1990,125 @@ object quantifiedChunkSupporter extends QuantifiedChunkSupport { matchingChunks ++ otherChunks } + override def findChunk(chunks: Iterable[Chunk], chunk: QuantifiedChunk, v: Verifier): Option[QuantifiedChunk] = { + val lr = chunk match { + case qfc: QuantifiedFieldChunk if qfc.invs.isDefined => + Left(qfc.invs.get.invertibles, qfc.quantifiedVars, qfc.condition) + case qfc: QuantifiedFieldChunk if qfc.singletonArguments.isDefined => + Right(qfc.singletonArguments.get) + case qpc: QuantifiedPredicateChunk if qpc.invs.isDefined => + Left(qpc.invs.get.invertibles, qpc.quantifiedVars, qpc.condition) + case qpc: QuantifiedPredicateChunk if qpc.singletonArguments.isDefined => + Right(qpc.singletonArguments.get) + case _ => return None + } + val relevantChunks: Iterable[QuantifiedBasicChunk] = chunks.flatMap { + case ch: QuantifiedBasicChunk if ch.id == chunk.id => Some(ch) + case _ => None + } + + val (receiverTerms, quantVars, cond) = lr match { + case Left(tuple) => tuple + case Right(singletonArguments) => + return relevantChunks.find { ch => + val chunkInfo = ch match { + case qfc: QuantifiedFieldChunk if qfc.singletonArguments.isDefined => + Some(qfc.singletonArguments.get) + case qpc: QuantifiedPredicateChunk if qpc.singletonArguments.isDefined => + Some(qpc.singletonArguments.get) + case _ => None + } + chunkInfo match { + case Some(cSingletonArguments) => + val equalityTerm = And(singletonArguments.zip(cSingletonArguments).map { case (a, b) => a === b }) + val result = v.decider.check(equalityTerm, Verifier.config.checkTimeout()) + if (result) { + // Learn the equality + v.decider.assume(equalityTerm) + } + result + case _ => false + } + } + } + relevantChunks.find { ch => + val chunkInfo = ch match { + case qfc: QuantifiedFieldChunk if qfc.invs.isDefined => + Some(qfc.invs.get.invertibles, qfc.quantifiedVars, qfc.condition) + case qpc: QuantifiedPredicateChunk if qpc.invs.isDefined => + Some(qpc.invs.get.invertibles, qpc.quantifiedVars, qpc.condition) + case _ => None + } + chunkInfo match { + case Some((cInvertibles, cQvars, cCond)) => + receiverTerms.zip(cInvertibles).forall(p => { + if (cQvars.length == quantVars.length && cQvars.zip(quantVars).forall(vars => vars._1.sort == vars._2.sort)) { + val condReplaced = cCond.replace(cQvars, quantVars) + val secondReplaced = p._2.replace(cQvars, quantVars) + val equalityTerm = SimplifyingForall(quantVars, And(Seq(p._1 === secondReplaced, cond === condReplaced)), Seq()) + val result = v.decider.check(equalityTerm, Verifier.config.checkTimeout()) + if (result) { + // Learn the equality + v.decider.assume(equalityTerm) + } + result + } else { + false + } + }) + case _ => false + } + } + } + + // Based on StateConsolidator#combineSnapshots + override def combineFieldSnapshotMaps(fr: FunctionRecorder, field: String, t1: Term, t2: Term, p1: Term, p2: Term, v: Verifier): (FunctionRecorder, Term, Term) = { + val lookupT1 = Lookup(field, t1, `?r`) + val lookupT2 = Lookup(field, t2, `?r`) + val (fr2, sm, smDef, triggers) = (IsPositive(p1), IsPositive(p2)) match { + // If we statically know that both permission values are positive we can forego the quantifier + case (True, True) => return (fr, t1, t1 === t2) + case (True, b2) => (fr, t1, Implies(b2, lookupT1 === lookupT2), Seq(Trigger(lookupT1), Trigger(lookupT2))) + case (b1, True) => (fr, t2, Implies(b1, lookupT2 === lookupT1), Seq(Trigger(lookupT2), Trigger(lookupT1))) + case (b1, b2) => + /* + * Since it is not definitely known whether p1 
and p2 are positive, + * we have to introduce a fresh snapshot. Note that it is not sound + * to use t1 or t2 and constrain it. + */ + val t3 = v.decider.fresh(t1.sort) + val lookupT3 = Lookup(field, t3, `?r`) + (fr.recordConstrainedVar(t3, And(Implies(b1, lookupT3 === lookupT1), Implies(b2, lookupT3 === lookupT2))), t3, + And(Implies(b1, lookupT3 === lookupT1), Implies(b2, lookupT3 === lookupT2)), Seq(Trigger(lookupT1), Trigger(lookupT2), Trigger(lookupT3))) + } + + (fr2, sm, Forall(`?r`, smDef, triggers)) + } + + // Based on StateConsolidator#combineSnapshots + override def combinePredicateSnapshotMaps(fr: FunctionRecorder, predicate: String, qVars: Seq[Var], t1: Term, t2: Term, p1: Term, p2: Term, v: Verifier): (FunctionRecorder, Term, Term) = { + val lookupT1 = PredicateLookup(predicate, t1, qVars) + val lookupT2 = PredicateLookup(predicate, t2, qVars) + val (fr2, sm, smDef, triggers) = (IsPositive(p1), IsPositive(p2)) match { + // If we statically know that both permission values are positive we can forego the quantifier + case (True, True) => return (fr, t1, t1 === t2) + case (True, b2) => (fr, t1, Implies(b2, lookupT1 === lookupT2), Seq(Trigger(lookupT1), Trigger(lookupT2))) + case (b1, True) => (fr, t2, Implies(b1, lookupT2 === lookupT1), Seq(Trigger(lookupT2), Trigger(lookupT1))) + case (b1, b2) => + /* + * Since it is not definitely known whether p1 and p2 are positive, + * we have to introduce a fresh snapshot. Note that it is not sound + * to use t1 or t2 and constrain it. + */ + val t3 = v.decider.fresh(t1.sort) + val lookupT3 = PredicateLookup(predicate, t3, qVars) + (fr.recordConstrainedVar(t3, And(Implies(b1, lookupT3 === lookupT1), Implies(b2, lookupT3 === lookupT2))), t3, + And(Implies(b1, lookupT3 === lookupT1), Implies(b2, lookupT3 === lookupT2)), Seq(Trigger(lookupT1), Trigger(lookupT2), Trigger(lookupT3))) + } + + (fr2, sm, Forall(qVars, smDef, triggers)) + } + def qpAppChunkOrderHeuristics(receiverTerms: Seq[Term], quantVars: Seq[Var], hints: Seq[Term], v: Verifier) : Seq[QuantifiedBasicChunk] => Seq[QuantifiedBasicChunk] = { // Heuristics that looks for quantified chunks that have the same shape (as in, the same number and types of diff --git a/src/main/scala/rules/StateConsolidator.scala b/src/main/scala/rules/StateConsolidator.scala index 8219512f5..a383eaf4f 100644 --- a/src/main/scala/rules/StateConsolidator.scala +++ b/src/main/scala/rules/StateConsolidator.scala @@ -26,7 +26,7 @@ trait StateConsolidationRules extends SymbolicExecutionRules { def consolidate(s: State, v: Verifier): State def consolidateOptionally(s: State, v: Verifier): State def merge(fr: FunctionRecorder, h: Heap, newH: Heap, v: Verifier): (FunctionRecorder, Heap) - def merge(fr: FunctionRecorder, h: Heap, ch: NonQuantifiedChunk, v: Verifier): (FunctionRecorder, Heap) + def merge(fr: FunctionRecorder, h: Heap, ch: Chunk, v: Verifier): (FunctionRecorder, Heap) protected def assumeUpperPermissionBoundForQPFields(s: State, v: Verifier): State protected def assumeUpperPermissionBoundForQPFields(s: State, heaps: Seq[Heap], v: Verifier): State @@ -44,7 +44,7 @@ class MinimalStateConsolidator extends StateConsolidationRules { def merge(fr: FunctionRecorder, h: Heap, newH: Heap, v: Verifier): (FunctionRecorder, Heap) = (fr, Heap(h.values ++ newH.values)) - def merge(fr: FunctionRecorder, h: Heap, ch: NonQuantifiedChunk, v: Verifier): (FunctionRecorder, Heap) = + def merge(fr: FunctionRecorder, h: Heap, ch: Chunk, v: Verifier): (FunctionRecorder, Heap) = (fr, h + ch) protected def 
assumeUpperPermissionBoundForQPFields(s: State, @unused v: Verifier): State = s @@ -66,13 +66,11 @@ class DefaultStateConsolidator(protected val config: Config) extends StateConsol val (functionRecorderAfterHeapMerging, mergedHeaps) = initialHeaps.foldLeft((s.functionRecorder, Nil: List[Heap])) { case ((fr, hs), h) => - val (nonQuantifiedChunks, otherChunks) = partition(h) - var continue = false - var mergedChunks: Seq[NonQuantifiedChunk] = Nil - var destChunks: Seq[NonQuantifiedChunk] = Nil - var newChunks: Seq[NonQuantifiedChunk] = nonQuantifiedChunks + var mergedChunks: Seq[Chunk] = Nil + var destChunks: Seq[Chunk] = Nil + var newChunks: Seq[Chunk] = h.values.toSeq var functionRecorder: FunctionRecorder = fr var fixedPointRound: Int = 0 @@ -94,10 +92,9 @@ class DefaultStateConsolidator(protected val config: Config) extends StateConsol fixedPointRound = fixedPointRound + 1 } while (continue) - val allChunks = mergedChunks ++ otherChunks - val interpreter = new NonQuantifiedPropertyInterpreter(allChunks, v) + val interpreter = new NonQuantifiedPropertyInterpreter(mergedChunks, v) - mergedChunks foreach { ch => + mergedChunks.filter(_.isInstanceOf[BasicChunk]) foreach { case ch: BasicChunk => val resource = Resources.resourceDescriptions(ch.resourceID) v.decider.assume(interpreter.buildPathConditionsForChunk(ch, resource.instanceProperties)) } @@ -107,7 +104,7 @@ class DefaultStateConsolidator(protected val config: Config) extends StateConsol } v.symbExLog.closeScope(sepIdentifier) - (functionRecorder, hs :+ Heap(allChunks)) + (functionRecorder, hs :+ Heap(mergedChunks)) } val s1 = s.copy(functionRecorder = functionRecorderAfterHeapMerging, @@ -123,43 +120,41 @@ class DefaultStateConsolidator(protected val config: Config) extends StateConsol if (s.retrying) consolidate(s, v) else s - def merge(fr: FunctionRecorder, h: Heap, ch: NonQuantifiedChunk, v: Verifier): (FunctionRecorder, Heap) = { + def merge(fr: FunctionRecorder, h: Heap, ch: Chunk, v: Verifier): (FunctionRecorder, Heap) = { merge(fr, h, Heap(Seq(ch)), v) } def merge(fr1: FunctionRecorder, h: Heap, newH: Heap, v: Verifier): (FunctionRecorder, Heap) = { val mergeLog = new CommentRecord("Merge", null, v.decider.pcs) val sepIdentifier = v.symbExLog.openScope(mergeLog) - val (nonQuantifiedChunks, otherChunks) = partition(h) - val (newNonQuantifiedChunks, newOtherChunk) = partition(newH) - val (fr2, mergedChunks, newlyAddedChunks, snapEqs) = singleMerge(fr1, nonQuantifiedChunks, newNonQuantifiedChunks, v) + val (fr2, mergedChunks, newlyAddedChunks, snapEqs) = singleMerge(fr1, h.values.toSeq, newH.values.toSeq, v) v.decider.assume(snapEqs) val interpreter = new NonQuantifiedPropertyInterpreter(mergedChunks, v) - newlyAddedChunks foreach { ch => + newlyAddedChunks.filter(_.isInstanceOf[BasicChunk]) foreach { case ch: BasicChunk => val resource = Resources.resourceDescriptions(ch.resourceID) v.decider.assume(interpreter.buildPathConditionsForChunk(ch, resource.instanceProperties)) } v.symbExLog.closeScope(sepIdentifier) - (fr2, Heap(mergedChunks ++ otherChunks ++ newOtherChunk)) + (fr2, Heap(mergedChunks)) } private def singleMerge(fr: FunctionRecorder, - destChunks: Seq[NonQuantifiedChunk], - newChunks: Seq[NonQuantifiedChunk], + destChunks: Seq[Chunk], + newChunks: Seq[Chunk], v: Verifier) : (FunctionRecorder, - Seq[NonQuantifiedChunk], - Seq[NonQuantifiedChunk], + Seq[Chunk], + Seq[Chunk], InsertionOrderedSet[Term]) = { val mergeLog = new SingleMergeRecord(destChunks, newChunks, v.decider.pcs) val sepIdentifier = 
v.symbExLog.openScope(mergeLog) // bookkeeper.heapMergeIterations += 1 - val initial = (fr, destChunks, Seq[NonQuantifiedChunk](), InsertionOrderedSet[Term]()) + val initial = (fr, destChunks, Seq[Chunk](), InsertionOrderedSet[Term]()) val result = newChunks.foldLeft(initial) { case ((fr1, accMergedChunks, accNewChunks, accSnapEqs), nextChunk) => /* accMergedChunks: already merged chunks @@ -169,7 +164,7 @@ class DefaultStateConsolidator(protected val config: Config) extends StateConsol * sequence of destination chunks */ - chunkSupporter.findMatchingChunk(accMergedChunks, nextChunk, v) match { + findMatchingChunk(accMergedChunks, nextChunk, v) match { case Some(ch) => mergeChunks(fr1, ch, nextChunk, v) match { case Some((fr2, newChunk, snapEq)) => @@ -185,14 +180,38 @@ class DefaultStateConsolidator(protected val config: Config) extends StateConsol result } + private def findMatchingChunk(chunks: Iterable[Chunk], chunk: Chunk, v: Verifier): Option[Chunk] = { + chunk match { + case chunk: BasicChunk => + chunkSupporter.findChunk[BasicChunk](chunks, chunk.id, chunk.args, v) + case chunk: QuantifiedChunk => quantifiedChunkSupporter.findChunk(chunks, chunk, v) + case _ => None + } + } + // Merges two chunks that are aliases (i.e. that have the same id and the args are proven to be equal) // and returns the merged chunk or None, if the chunks could not be merged - private def mergeChunks(fr1: FunctionRecorder, chunk1: NonQuantifiedChunk, chunk2: NonQuantifiedChunk, v: Verifier) = (chunk1, chunk2) match { + private def mergeChunks(fr1: FunctionRecorder, chunk1: Chunk, chunk2: Chunk, v: Verifier) = (chunk1, chunk2) match { case (BasicChunk(rid1, id1, args1, snap1, perm1), BasicChunk(_, _, _, snap2, perm2)) => val (fr2, combinedSnap, snapEq) = combineSnapshots(fr1, snap1, snap2, perm1, perm2, v) Some(fr2, BasicChunk(rid1, id1, args1, combinedSnap, PermPlus(perm1, perm2)), snapEq) - case (_, _) => + case (l@QuantifiedFieldChunk(id1, fvf1, condition1, perm1, invs1, singletonRcvr1, hints1), + r@QuantifiedFieldChunk(_, fvf2, _, perm2, _, _, hints2)) => + assert(l.quantifiedVars == Seq(`?r`)) + assert(r.quantifiedVars == Seq(`?r`)) + // We need to use l.perm/r.perm here instead of perm1 and perm2 since the permission amount might be dependent on the condition/domain + val (fr2, combinedSnap, snapEq) = quantifiedChunkSupporter.combineFieldSnapshotMaps(fr1, id1.name, fvf1, fvf2, l.perm, r.perm, v) + val permSum = PermPlus(perm1, perm2) + val bestHints = if (hints1.nonEmpty) hints1 else hints2 + Some(fr2, QuantifiedFieldChunk(id1, combinedSnap, condition1, permSum, invs1, singletonRcvr1, bestHints), snapEq) + case (l@QuantifiedPredicateChunk(id1, qVars1, psf1, _, perm1, _, _, _), + r@QuantifiedPredicateChunk(_, qVars2, psf2, condition2, perm2, invs2, singletonArgs2, hints2)) => + val (fr2, combinedSnap, snapEq) = quantifiedChunkSupporter.combinePredicateSnapshotMaps(fr1, id1.name, qVars2, psf1, psf2, l.perm.replace(qVars1, qVars2), r.perm, v) + + val permSum = PermPlus(perm1.replace(qVars1, qVars2), perm2) + Some(fr2, QuantifiedPredicateChunk(id1, qVars2, combinedSnap, condition2, permSum, invs2, singletonArgs2, hints2), snapEq) + case _ => None } @@ -339,7 +358,7 @@ class MinimalRetryingStateConsolidator(config: Config) extends RetryingStateCons override def merge(fr: FunctionRecorder, h: Heap, newH: Heap, v: Verifier): (FunctionRecorder, Heap) = (fr, Heap(h.values ++ newH.values)) - override def merge(fr: FunctionRecorder, h: Heap, ch: NonQuantifiedChunk, v: Verifier): (FunctionRecorder, Heap) = + 
override def merge(fr: FunctionRecorder, h: Heap, ch: Chunk, v: Verifier): (FunctionRecorder, Heap) = (fr, h + ch) override protected def assumeUpperPermissionBoundForQPFields(s: State, @unused v: Verifier): State = s diff --git a/src/main/scala/rules/chunks/ChunkSupporter.scala b/src/main/scala/rules/chunks/ChunkSupporter.scala index 75d6d6e3a..4613c782e 100644 --- a/src/main/scala/rules/chunks/ChunkSupporter.scala +++ b/src/main/scala/rules/chunks/ChunkSupporter.scala @@ -44,12 +44,6 @@ trait ChunkSupportRules extends SymbolicExecutionRules { (Q: (State, Heap, Term, Verifier) => VerificationResult) : VerificationResult - def findMatchingChunk[CH <: NonQuantifiedChunk: ClassTag] - (chunks: Iterable[Chunk], - chunk: CH, - v: Verifier) - : Option[CH] - def findChunksWithID[CH <: NonQuantifiedChunk: ClassTag] (chunks: Iterable[Chunk], id: ChunkIdentifer) @@ -278,7 +272,7 @@ object chunkSupporter extends ChunkSupportRules { } } - private[chunks] def findChunk[CH <: NonQuantifiedChunk: ClassTag] + def findChunk[CH <: NonQuantifiedChunk: ClassTag] (chunks: Iterable[Chunk], id: ChunkIdentifer, args: Iterable[Term], @@ -288,11 +282,6 @@ object chunkSupporter extends ChunkSupportRules { findChunkLiterally(relevantChunks, args) orElse findChunkWithProver(relevantChunks, args, v) } - def findMatchingChunk[CH <: NonQuantifiedChunk: ClassTag] - (chunks: Iterable[Chunk], chunk: CH, v: Verifier): Option[CH] = { - findChunk[CH](chunks, chunk.id, chunk.args, v) - } - def findChunksWithID[CH <: NonQuantifiedChunk: ClassTag](chunks: Iterable[Chunk], id: ChunkIdentifer): Iterable[CH] = { chunks.flatMap { case c: CH if id == c.id => Some(c) diff --git a/src/main/scala/state/Chunks.scala b/src/main/scala/state/Chunks.scala index 1e5d3ceec..6a3acce73 100644 --- a/src/main/scala/state/Chunks.scala +++ b/src/main/scala/state/Chunks.scala @@ -40,6 +40,9 @@ case class BasicChunk(resourceID: BaseID, //case PredicateID => require(snap.sort == sorts.Snap, s"A predicate chunk's snapshot ($snap) is expected to be of sort Snap, but found ${snap.sort}") //} + override def applyCondition(newCond: Term) = withPerm(Ite(newCond, perm, NoPerm)) + override def permMinus(newPerm: Term) = withPerm(PermMinus(perm, newPerm)) + override def permPlus(newPerm: Term) = withPerm(PermPlus(perm, newPerm)) override def withPerm(newPerm: Term) = BasicChunk(resourceID, id, args, snap, newPerm) override def withSnap(newSnap: Term) = BasicChunk(resourceID, id, args, newSnap, perm) @@ -51,7 +54,11 @@ case class BasicChunk(resourceID: BaseID, sealed trait QuantifiedBasicChunk extends QuantifiedChunk { override val id: ChunkIdentifer - override def withPerm(perm: Term): QuantifiedBasicChunk + + override def applyCondition(newCond: Term): QuantifiedBasicChunk + override def permMinus(perm: Term): QuantifiedBasicChunk + override def permPlus(perm: Term): QuantifiedBasicChunk + def withPerm(perm: Term): QuantifiedBasicChunk override def withSnapshotMap(snap: Term): QuantifiedBasicChunk def singletonArguments: Option[Seq[Term]] def hints: Seq[Term] @@ -64,19 +71,20 @@ sealed trait QuantifiedBasicChunk extends QuantifiedChunk { */ case class QuantifiedFieldChunk(id: BasicChunkIdentifier, fvf: Term, - perm: Term, + condition: Term, + permValue: Term, invs: Option[InverseFunctions], - initialCond: Option[Term], singletonRcvr: Option[Term], hints: Seq[Term] = Nil) extends QuantifiedBasicChunk { require(fvf.sort.isInstanceOf[terms.sorts.FieldValueFunction], s"Quantified chunk values must be of sort FieldValueFunction, but found value $fvf of sort 
${fvf.sort}") - require(perm.sort == sorts.Perm, s"Permissions $perm must be of sort Perm, but found ${perm.sort}") + require(permValue.sort == sorts.Perm, s"Permissions $permValue must be of sort Perm, but found ${permValue.sort}") override val resourceID = FieldID override val quantifiedVars = Seq(`?r`) + override val perm = Ite(condition, permValue, NoPerm) override def snapshotMap: Term = fvf override def singletonArguments: Option[Seq[Term]] = singletonRcvr.map(Seq(_)) @@ -89,8 +97,15 @@ case class QuantifiedFieldChunk(id: BasicChunkIdentifier, Lookup(id.name, fvf, arguments.head) } - override def withPerm(newPerm: Term) = QuantifiedFieldChunk(id, fvf, newPerm, invs, initialCond, singletonRcvr, hints) - override def withSnapshotMap(newFvf: Term) = QuantifiedFieldChunk(id, newFvf, perm, invs, initialCond, singletonRcvr, hints) + override def applyCondition(newCond: Term) = QuantifiedFieldChunk(id, fvf, terms.And(newCond, condition), permValue, invs, singletonRcvr, hints) + override def permMinus(newPerm: Term) = QuantifiedFieldChunk(id, fvf, condition, PermMinus(permValue, newPerm), invs, singletonRcvr, hints) + override def permPlus(newPerm: Term) = QuantifiedFieldChunk(id, fvf, condition, PermPlus(permValue, newPerm), invs, singletonRcvr, hints) + override def withSnapshotMap(newFvf: Term) = QuantifiedFieldChunk(id, newFvf, condition, permValue, invs, singletonRcvr, hints) + override def withPerm(newPerm: Term) = { + // TODO: This implementation only replaces permValue, so if the caller assumes that it'll set the perm value period, it is + // only correct if newPerm is zero when condition is false. + QuantifiedFieldChunk(id, fvf, condition, newPerm, invs, singletonRcvr, hints) + } override lazy val toString = s"${terms.Forall} ${`?r`} :: ${`?r`}.$id -> $fvf # $perm" } @@ -98,26 +113,30 @@ case class QuantifiedFieldChunk(id: BasicChunkIdentifier, case class QuantifiedPredicateChunk(id: BasicChunkIdentifier, quantifiedVars: Seq[Var], psf: Term, - perm: Term, + condition: Term, + permValue: Term, invs: Option[InverseFunctions], - initialCond: Option[Term], singletonArgs: Option[Seq[Term]], hints: Seq[Term] = Nil) extends QuantifiedBasicChunk { require(psf.sort.isInstanceOf[terms.sorts.PredicateSnapFunction], s"Quantified predicate chunk values must be of sort PredicateSnapFunction ($psf), but found ${psf.sort}") - require(perm.sort == sorts.Perm, s"Permissions $perm must be of sort Perm, but found ${perm.sort}") + require(permValue.sort == sorts.Perm, s"Permissions $permValue must be of sort Perm, but found ${permValue.sort}") override val resourceID = PredicateID + override val perm = Ite(condition, permValue, NoPerm) override def snapshotMap: Term = psf override def singletonArguments: Option[Seq[Term]] = singletonArgs override def valueAt(args: Seq[Term]) = PredicateLookup(id.name, psf, args) - override def withPerm(newPerm: Term) = QuantifiedPredicateChunk(id, quantifiedVars, psf, newPerm, invs, initialCond, singletonArgs, hints) - override def withSnapshotMap(newPsf: Term) = QuantifiedPredicateChunk(id, quantifiedVars, newPsf, perm, invs, initialCond, singletonArgs, hints) + override def applyCondition(newCond: Term) = QuantifiedPredicateChunk(id, quantifiedVars, psf, terms.And(newCond, condition), permValue, invs, singletonArgs, hints) + override def permMinus(newPerm: Term) = QuantifiedPredicateChunk(id, quantifiedVars, psf, condition, PermMinus(permValue, newPerm), invs, singletonArgs, hints) + override def permPlus(newPerm: Term) = QuantifiedPredicateChunk(id, quantifiedVars, 
psf, condition, PermPlus(permValue, newPerm), invs, singletonArgs, hints) + override def withPerm(newPerm: Term) = QuantifiedPredicateChunk(id, quantifiedVars, psf, condition, newPerm, invs, singletonArgs, hints) + override def withSnapshotMap(newPsf: Term) = QuantifiedPredicateChunk(id, quantifiedVars, newPsf, condition, permValue, invs, singletonArgs, hints) override lazy val toString = s"${terms.Forall} ${quantifiedVars.mkString(",")} :: $id(${quantifiedVars.mkString(",")}) -> $psf # $perm" } @@ -126,7 +145,6 @@ case class QuantifiedMagicWandChunk(id: MagicWandIdentifier, wsf: Term, perm: Term, invs: Option[InverseFunctions], - initialCond: Option[Term], singletonArgs: Option[Seq[Term]], hints: Seq[Term] = Nil) extends QuantifiedBasicChunk { @@ -140,9 +158,11 @@ case class QuantifiedMagicWandChunk(id: MagicWandIdentifier, override def singletonArguments: Option[Seq[Term]] = singletonArgs override def valueAt(args: Seq[Term]) = PredicateLookup(id.toString, wsf, args) - - override def withPerm(newPerm: Term) = QuantifiedMagicWandChunk(id, quantifiedVars, wsf, newPerm, invs, initialCond, singletonArgs, hints) - override def withSnapshotMap(newWsf: Term) = QuantifiedMagicWandChunk(id, quantifiedVars, newWsf, perm, invs, initialCond, singletonArgs, hints) + override def applyCondition(newCond: Term) = withPerm(Ite(newCond, perm, NoPerm)) + override def permMinus(newPerm: Term) = withPerm(PermMinus(perm, newPerm)) + override def permPlus(newPerm: Term) = withPerm(PermPlus(perm, newPerm)) + def withPerm(newPerm: Term) = QuantifiedMagicWandChunk(id, quantifiedVars, wsf, newPerm, invs, singletonArgs, hints) + override def withSnapshotMap(newWsf: Term) = QuantifiedMagicWandChunk(id, quantifiedVars, newWsf, perm, invs, singletonArgs, hints) override lazy val toString = s"${terms.Forall} ${quantifiedVars.mkString(",")} :: $id(${quantifiedVars.mkString(",")}) -> $wsf # $perm" } @@ -175,6 +195,15 @@ case class MagicWandChunk(id: MagicWandIdentifier, override val resourceID = MagicWandID + override def applyCondition(newCond: Term) = + withPerm(Ite(newCond, perm, NoPerm)) + + override def permMinus(newPerm: Term) = + withPerm(PermMinus(perm, newPerm)) + + override def permPlus(newPerm: Term) = + withPerm(PermPlus(perm, newPerm)) + override def withPerm(newPerm: Term) = MagicWandChunk(id, bindings, args, snap, newPerm) override def withSnap(newSnap: Term) = newSnap match { case s: MagicWandSnapshot => MagicWandChunk(id, bindings, args, s, perm) diff --git a/src/main/scala/state/State.scala b/src/main/scala/state/State.scala index 8d6a6294b..348229b68 100644 --- a/src/main/scala/state/State.scala +++ b/src/main/scala/state/State.scala @@ -279,7 +279,7 @@ object State { h map (c => { c match { case c: GeneralChunk => - c.withPerm(Ite(cond, c.perm, NoPerm)) + c.applyCondition(cond) case _ => sys.error("Chunk type not conditionalizable.") } }) diff --git a/src/test/resources/kinduct/basic_while.vpr b/src/test/resources/kinduct/basic_while.vpr index 2ccd1befe..30aeaec3e 100644 --- a/src/test/resources/kinduct/basic_while.vpr +++ b/src/test/resources/kinduct/basic_while.vpr @@ -140,3 +140,67 @@ method t11(r: Ref) returns () j := j+1 } } + +method justChecking(r: Ref) returns () + requires acc(r.f, write) +{ + var j: Int := 2 + while (j < (r.f)) + invariant acc(r.f, 1/2) + { + j := j+1 + } + assert perm(r.f) == write + //:: ExpectedOutput(assert.failed:assertion.false) + assert false +} + +method justChecking2(r: Ref) returns () + requires acc(r.f, write) +{ + var j: Int := 2 + assume r.f == 32 + while (j < 
(r.f)) + invariant acc(r.f, 1/2) + { + assume r.f == 34 + j := j+1 + } + assert perm(r.f) == write + assert false +} + +method justChecking3(r: Ref) returns () + requires acc(r.f, write) +{ + var j: Int := 2 + assume r.f == 32 + while (j < (r.f)) + invariant acc(r.f, 1/2) + { + r.f := 34 + j := j+1 + } + assert perm(r.f) == write + assert r.f == 32 || r.f == 34 + assert r.f == 34 + //:: ExpectedOutput(assert.failed:assertion.false) + assert r.f == 32 +} + +method justChecking4(r: Ref) returns () + requires acc(r.f, write) +{ + var j: Int + assume r.f == 32 + while (j < (r.f)) + invariant acc(r.f, 1/2) + { + r.f := 34 + j := j+1 + } + assert perm(r.f) == write + assert r.f == 32 || r.f == 34 + //:: ExpectedOutput(assert.failed:assertion.false) + assert r.f == 34 +} \ No newline at end of file diff --git a/src/test/resources/kinduct/forceQP/basic_while.vpr b/src/test/resources/kinduct/forceQP/basic_while.vpr index c7410b11d..d1c4a67ce 100644 --- a/src/test/resources/kinduct/forceQP/basic_while.vpr +++ b/src/test/resources/kinduct/forceQP/basic_while.vpr @@ -156,3 +156,71 @@ method t11(r: Ref) returns () j := j+1 } } + +method justChecking(r: Ref) returns () + requires acc(r.f, write) +{ + var j: Int := 2 + forceQPs() + while (j < (r.f)) + invariant acc(r.f, 1/2) + { + j := j+1 + } + assert perm(r.f) == write + //:: ExpectedOutput(assert.failed:assertion.false) + assert false +} + +method justChecking2(r: Ref) returns () + requires acc(r.f, write) +{ + var j: Int := 2 + assume r.f == 32 + forceQPs() + while (j < (r.f)) + invariant acc(r.f, 1/2) + { + assume r.f == 34 + j := j+1 + } + assert perm(r.f) == write + assert false +} + +method justChecking3(r: Ref) returns () + requires acc(r.f, write) +{ + var j: Int := 2 + assume r.f == 32 + forceQPs() + while (j < (r.f)) + invariant acc(r.f, 1/2) + { + r.f := 34 + j := j+1 + } + assert perm(r.f) == write + assert r.f == 32 || r.f == 34 + assert r.f == 34 + //:: ExpectedOutput(assert.failed:assertion.false) + assert r.f == 32 +} + +method justChecking4(r: Ref) returns () + requires acc(r.f, write) +{ + var j: Int + assume r.f == 32 + forceQPs() + while (j < (r.f)) + invariant acc(r.f, 1/2) + { + r.f := 34 + j := j+1 + } + assert perm(r.f) == write + assert r.f == 32 || r.f == 34 + //:: ExpectedOutput(assert.failed:assertion.false) + assert r.f == 34 +} \ No newline at end of file
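
The core of the chunk changes above is that a quantified chunk's permission is now stored as a condition plus a permission value, with the effective permission defined as Ite(condition, permValue, NoPerm); applyCondition strengthens the condition, while permMinus/permPlus adjust only the value. A minimal standalone sketch of that algebra follows (toy Term type and illustrative names such as QuantChunk, not Silicon's actual classes):

object ChunkPermSketch {

  // Toy term language standing in for Silicon's Term; names are illustrative only.
  sealed trait Term
  case object NoPerm extends Term
  case class Var(name: String) extends Term
  case class And(left: Term, right: Term) extends Term
  case class Ite(cond: Term, thn: Term, els: Term) extends Term
  case class PermMinus(left: Term, right: Term) extends Term
  case class PermPlus(left: Term, right: Term) extends Term

  // Simplified quantified chunk: permission is kept as (condition, permValue)
  // instead of a single conditionalized term.
  final case class QuantChunk(id: String, condition: Term, permValue: Term) {
    // Effective permission, as in QuantifiedFieldChunk.perm in the patch.
    def perm: Term = Ite(condition, permValue, NoPerm)

    // State.conditionalize now strengthens the condition instead of wrapping perm in Ite.
    def applyCondition(newCond: Term): QuantChunk = copy(condition = And(newCond, condition))

    // Consuming/producing permission only touches the value part.
    def permMinus(p: Term): QuantChunk = copy(permValue = PermMinus(permValue, p))
    def permPlus(p: Term): QuantChunk = copy(permValue = PermPlus(permValue, p))
  }

  def main(args: Array[String]): Unit = {
    val ch = QuantChunk("f", condition = Var("r in xs"), permValue = Var("p"))
    val conditioned = ch.applyCondition(Var("b"))   // condition becomes b && (r in xs)
    val consumed = conditioned.permMinus(Var("q"))  // permValue becomes p - q, condition unchanged
    println(consumed.perm)                          // Ite(And(b, r in xs), PermMinus(p, q), NoPerm)
  }
}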
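
The new combineFieldSnapshotMaps / combinePredicateSnapshotMaps follow the same case split as StateConsolidator#combineSnapshots: if a permission amount is statically known to be positive, that chunk's snapshot map can be kept and the other map only has to agree where its own permission is positive; if neither is known positive, a fresh map is introduced and constrained under both positivity guards. The real implementation additionally quantifies over the receiver/arguments, adds Lookup triggers, and records the fresh map in the FunctionRecorder. A simplified, self-contained sketch with assumed helper names (knownPositive, combine):

object SnapshotMergeSketch {

  sealed trait Term
  case class Var(name: String) extends Term
  case object FullPerm extends Term
  case class Positive(p: Term) extends Term           // stands in for IsPositive(p)
  case class Implies(lhs: Term, rhs: Term) extends Term
  case class Equal(lhs: Term, rhs: Term) extends Term
  case class And(conjuncts: Seq[Term]) extends Term

  // Crude stand-in for IsPositive(p) simplifying to True: here only a literal full
  // permission counts as statically positive.
  private def knownPositive(p: Term): Boolean = p == FullPerm

  /** Returns (snapshot map to keep, constraint to assume). */
  def combine(sm1: Term, sm2: Term, p1: Term, p2: Term, freshMap: () => Term): (Term, Term) =
    (knownPositive(p1), knownPositive(p2)) match {
      case (true, true) =>
        // Both chunks definitely hold permission, so their maps must agree outright.
        (sm1, Equal(sm1, sm2))
      case (true, false) =>
        // Keep sm1; sm2 only has to agree where its own permission is positive.
        (sm1, Implies(Positive(p2), Equal(sm1, sm2)))
      case (false, true) =>
        (sm2, Implies(Positive(p1), Equal(sm2, sm1)))
      case (false, false) =>
        // Neither amount is known to be positive: constraining sm1 or sm2 directly would
        // be unsound, so a fresh map is introduced and tied to each original under a guard.
        val sm3 = freshMap()
        (sm3, And(Seq(
          Implies(Positive(p1), Equal(sm3, sm1)),
          Implies(Positive(p2), Equal(sm3, sm2)))))
    }

  def main(args: Array[String]): Unit = {
    var i = 0
    val fresh = () => { i += 1; Var(s"sm$i") }
    println(combine(Var("sm_a"), Var("sm_b"), Var("p_a"), Var("p_b"), fresh))
  }
}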