Commit
Merge
marcoeilers committed Oct 9, 2023
2 parents 52defae + 53cccb9 commit 699711e
Showing 10 changed files with 77 additions and 19 deletions.
18 changes: 13 additions & 5 deletions src/main/scala/decider/Decider.scala
@@ -40,7 +40,8 @@ trait Decider {
def setPathConditionMark(): Mark

def assume(t: Term): Unit
- def assume(ts: InsertionOrderedSet[Term], enforceAssumption: Boolean = false): Unit
+ def assumeDefinition(t: Term): Unit
+ def assume(ts: InsertionOrderedSet[Term], enforceAssumption: Boolean = false, isDefinition: Boolean = false): Unit
def assume(ts: Iterable[Term]): Unit

def getTriggerGenerator(): TriggerGenerator
@@ -213,23 +214,30 @@ trait DefaultDeciderProvider extends VerifierComponent { this: Verifier =>
assume(InsertionOrderedSet(Seq(t)), false)
}

+ override def assumeDefinition(t: Term): Unit =
+   assume(InsertionOrderedSet(Seq(t)), false, true)
+
def assume(terms: Iterable[Term]): Unit =
assume(InsertionOrderedSet(terms), false)

- def assume(terms: InsertionOrderedSet[Term], enforceAssumption: Boolean = false): Unit = {
+ def assume(terms: InsertionOrderedSet[Term], enforceAssumption: Boolean = false, isDefinition: Boolean = false): Unit = {
val filteredTerms =
if (enforceAssumption) terms
else terms filterNot isKnownToBeTrue

- if (filteredTerms.nonEmpty) assumeWithoutSmokeChecks(filteredTerms)
+ if (filteredTerms.nonEmpty) assumeWithoutSmokeChecks(filteredTerms, isDefinition)
}

- private def assumeWithoutSmokeChecks(terms: InsertionOrderedSet[Term]) = {
+ private def assumeWithoutSmokeChecks(terms: InsertionOrderedSet[Term], isDefinition: Boolean = false) = {
val assumeRecord = new DeciderAssumeRecord(terms)
val sepIdentifier = symbExLog.openScope(assumeRecord)

/* Add terms to Silicon-managed path conditions */
- terms foreach pathConditions.add
+ if (isDefinition) {
+   terms foreach pathConditions.addDefinition
+ } else {
+   terms foreach pathConditions.add
+ }

/* Add terms to the prover's assumptions */
terms foreach prover.assume
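
In short, assumeDefinition forwards to the extended assume with isDefinition = true, which routes the terms to pathConditions.addDefinition instead of pathConditions.add. A minimal sketch of the intended call pattern, modelled on the call sites changed later in this commit (d and t mirror the Chunks.scala hunk at the end; someOrdinaryFact is purely illustrative):

    // Sketch: introduce a fresh symbol together with its defining equation.
    // The equation is recorded via assumeDefinition so that it is kept as a
    // defining assumption; ordinary facts still go through assume.
    val newVar = d.fresh(t.sort)        // fresh symbol of t's sort
    val constraint = newVar === t       // its defining equation
    d.assumeDefinition(constraint)      // ends up in pathConditions.addDefinition
    d.assume(someOrdinaryFact)          // regular path condition (illustrative)
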
50 changes: 50 additions & 0 deletions src/main/scala/decider/PathConditions.scala
@@ -24,8 +24,11 @@ trait RecordedPathConditions {
def branchConditions: Stack[Term]
def branchConditionExps: Stack[Option[ast.Exp]]
def assumptions: InsertionOrderedSet[Term]
+ def definingAssumptions: InsertionOrderedSet[Term]
def declarations: InsertionOrderedSet[Decl]

+ def definitionsOnly: RecordedPathConditions
+
def contains(assumption: Term): Boolean

def conditionalized: Seq[Term]
@@ -42,6 +45,7 @@
trait PathConditionStack extends RecordedPathConditions {
def setCurrentBranchCondition(condition: Term, conditionExp: Option[ast.Exp]): Unit
def add(assumption: Term): Unit
+ def addDefinition(assumption: Term): Unit
def add(declaration: Decl): Unit
def pushScope(): Unit
def popScope(): Unit
@@ -64,17 +68,26 @@ private class PathConditionStackLayer
private var _branchConditionExp: Option[Option[ast.Exp]] = None
private var _globalAssumptions: InsertionOrderedSet[Term] = InsertionOrderedSet.empty
private var _nonGlobalAssumptions: InsertionOrderedSet[Term] = InsertionOrderedSet.empty
+ private var _globalDefiningAssumptions: InsertionOrderedSet[Term] = InsertionOrderedSet.empty
private var _declarations: InsertionOrderedSet[Decl] = InsertionOrderedSet.empty

def branchCondition: Option[Term] = _branchCondition
def branchConditionExp: Option[Option[ast.Exp]] = _branchConditionExp
def globalAssumptions: InsertionOrderedSet[Term] = _globalAssumptions
+ def definingAssumptions: InsertionOrderedSet[Term] = _globalDefiningAssumptions
def nonGlobalAssumptions: InsertionOrderedSet[Term] = _nonGlobalAssumptions
def declarations: InsertionOrderedSet[Decl] = _declarations

def assumptions: InsertionOrderedSet[Term] = globalAssumptions ++ nonGlobalAssumptions
def pathConditions: InsertionOrderedSet[Term] = assumptions ++ branchCondition

+ def definitionsOnly(): PathConditionStackLayer = {
+   val result = new PathConditionStackLayer
+   result._globalAssumptions = _globalDefiningAssumptions
+   result._declarations = _declarations
+   result
+ }
+
def branchCondition_=(condition: Term): Unit = {
assert(_branchCondition.isEmpty,
s"Branch condition is already set (to ${_branchCondition.get}), "
@@ -104,6 +117,16 @@
_nonGlobalAssumptions += assumption
}

+ def addDefinition(assumption: Term): Unit = {
+   assert(
+     !assumption.isInstanceOf[And],
+     s"Unexpectedly found a conjunction (should have been split): $assumption")
+
+   //assert(PathConditions.isGlobal(assumption))
+   _globalAssumptions += assumption
+   _globalDefiningAssumptions += assumption
+ }
+
def add(declaration: Decl): Unit = _declarations += declaration

def contains(pathCondition: Term): Boolean = {
@@ -134,6 +157,9 @@ private trait LayeredPathConditionStackLike {
protected def assumptions(layers: Stack[PathConditionStackLayer]): InsertionOrderedSet[Term] =
InsertionOrderedSet(layers.flatMap(_.assumptions)) // Note: Performance?

+ protected def definingAssumptions(layers: Stack[PathConditionStackLayer]): InsertionOrderedSet[Term] =
+   InsertionOrderedSet(layers.flatMap(_.definingAssumptions)) // Note: Performance?
+
protected def declarations(layers: Stack[PathConditionStackLayer]): InsertionOrderedSet[Decl] =
InsertionOrderedSet(layers.flatMap(_.declarations)) // Note: Performance?

@@ -199,12 +225,17 @@ private class DefaultRecordedPathConditions(from: Stack[PathConditionStackLayer]
val branchConditions: Stack[Term] = branchConditions(from)
val branchConditionExps: Stack[Option[ast.Exp]] = branchConditionExps(from)
val assumptions: InsertionOrderedSet[Term] = assumptions(from)
+ val definingAssumptions: InsertionOrderedSet[Term] = definingAssumptions(from)
val declarations: InsertionOrderedSet[Decl] = declarations(from)

def contains(assumption: Term): Boolean = contains(from, assumption)

val conditionalized: Seq[Term] = conditionalized(from)

+ def definitionsOnly(): RecordedPathConditions = {
+   new DefaultRecordedPathConditions(from.map(_.definitionsOnly))
+ }
+
def quantified(quantifier: Quantifier,
qvars: Seq[Var],
triggers: Seq[Trigger],
@@ -248,6 +279,15 @@ private[decider] class LayeredPathConditionStack
allAssumptions ++= tlcs
}

+ def addDefinition(assumption: Term): Unit = {
+   /* TODO: Would be cleaner to not add assumptions that are already set as branch conditions */
+
+   val tlcs = assumption.topLevelConjuncts
+
+   tlcs foreach layers.head.addDefinition
+   allAssumptions ++= tlcs
+ }
+
def add(declaration: Decl): Unit = {
layers.head.add(declaration)
}
@@ -311,6 +351,9 @@
def declarations: InsertionOrderedSet[Decl] =
InsertionOrderedSet(layers.flatMap(_.declarations)) // Note: Performance?

+ def definingAssumptions: InsertionOrderedSet[Term] =
+   InsertionOrderedSet(layers.flatMap(_.definingAssumptions)) // Note: Performance?
+
def contains(assumption: Term): Boolean = allAssumptions.contains(assumption)

def conditionalized: Seq[Term] = conditionalized(layers)
@@ -357,6 +400,13 @@ private[decider] class LayeredPathConditionStack
clonedStack
}

+ override def definitionsOnly: RecordedPathConditions = {
+   val result = duplicate()
+   result.layers = layers map (_.definitionsOnly())
+   result.allAssumptions = InsertionOrderedSet(layers.flatMap(_.definingAssumptions))
+   result
+ }
+
override def toString: String = {
val sb = new StringBuilder(s"${this.getClass.getSimpleName}:\n")
val sep = s" ${"-" * 10}\n"
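
Taken together, the PathConditions changes keep everything recorded via addDefinition in a separate _globalDefiningAssumptions set (in addition to the ordinary global assumptions), and definitionsOnly yields a copy of the recorded path conditions that retains only those defining assumptions and the declarations, dropping ordinary assumptions and branch conditions. A rough sketch of how the view is meant to be consumed, modelled on the MagicWandSupporter change further down (preMark and v mirror the diff; the trailing comments state expectations under that reading, not guaranteed API behaviour):

    // Sketch: conserve only the definitional part of what was recorded
    // since preMark (cf. the magic-wand packaging code below).
    val pcs: RecordedPathConditions = v.decider.pcs.after(preMark)
    val conserved = pcs.definitionsOnly
    // conserved.assumptions should coincide with pcs.definingAssumptions,
    // conserved.declarations with pcs.declarations; branch conditions and
    // non-defining assumptions are dropped.
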
4 changes: 2 additions & 2 deletions src/main/scala/rules/Evaluator.scala
@@ -175,7 +175,7 @@ object evaluator extends EvaluationRules {

case _: ast.WildcardPerm =>
val (tVar, tConstraints) = v.decider.freshARP()
- v.decider.assume(tConstraints)
+ v.decider.assumeDefinition(tConstraints)
/* TODO: Only record wildcards in State.constrainableARPs that are used in exhale
* position. Currently, wildcards used in inhale position (only) may not be removed
* from State.constrainableARPs (potentially inefficient, but should be sound).
@@ -329,7 +329,7 @@ object evaluator extends EvaluationRules {
case ast.Let(x, e0, e1) =>
eval(s, e0, pve, v)((s1, t0, v1) => {
val t = v1.decider.appliedFresh("letvar", v1.symbolConverter.toSort(x.typ), s1.relevantQuantifiedVariables)
- v1.decider.assume(t === t0)
+ v1.decider.assumeDefinition(t === t0)
val newFuncRec = s1.functionRecorder.recordFreshSnapshot(t.applicable.asInstanceOf[Function])
eval(s1.copy(g = s1.g + (x.localVar, t0), functionRecorder = newFuncRec), e1, pve, v1)(Q)
})
6 changes: 3 additions & 3 deletions src/main/scala/rules/Executor.scala
@@ -368,7 +368,7 @@ object executor extends ExecutionRules {
val h3 = Heap(remainingChunks ++ otherChunks)
val (sm, smValueDef) = quantifiedChunkSupporter.singletonSnapshotMap(s3, field, Seq(tRcvr), tRhs, v2)
v1.decider.prover.comment("Definitional axioms for singleton-FVF's value")
- v1.decider.assume(smValueDef)
+ v1.decider.assumeDefinition(smValueDef)
val ch = quantifiedChunkSupporter.createSingletonQuantifiedChunk(Seq(`?r`), field, Seq(tRcvr), FullPerm, sm, s.program)
if (s3.heapDependentTriggers.contains(field))
v1.decider.assume(FieldTrigger(field.name, sm, tRcvr))
@@ -422,7 +422,7 @@
if (s.qpFields.contains(field)) {
val (sm, smValueDef) = quantifiedChunkSupporter.singletonSnapshotMap(s, field, Seq(tRcvr), snap, v)
v.decider.prover.comment("Definitional axioms for singleton-FVF's value")
- v.decider.assume(smValueDef)
+ v.decider.assumeDefinition(smValueDef)
quantifiedChunkSupporter.createSingletonQuantifiedChunk(Seq(`?r`), field, Seq(tRcvr), p, sm, s.program)
} else {
BasicChunk(FieldID, BasicChunkIdentifier(field.name), Seq(tRcvr), snap, p)
@@ -711,7 +711,7 @@
* reported by Silicon issue #328.
*/
val t = v.decider.fresh(name, v.symbolConverter.toSort(typ))
- v.decider.assume(t === rhs)
+ v.decider.assumeDefinition(t === rhs)

t
}
8 changes: 4 additions & 4 deletions src/main/scala/rules/MagicWandSupporter.scala
@@ -289,15 +289,15 @@ object magicWandSupporter extends SymbolicExecutionRules {
// it is, we don't record (any further) path conditions.
// TODO: Fix this. Might require a substantial redesign of Silicon's path conditions, though.

- if (!v4.decider.checkSmoke()) {
-   conservedPcs = s5.conservedPcs.head :+ pcs
+ //if (!v4.decider.checkSmoke()) {
+   conservedPcs = s5.conservedPcs.head :+ pcs.definitionsOnly

conservedPcsStack =
s5.conservedPcs.tail match {
case empty @ Seq() => empty
case head +: tail => (head ++ conservedPcs) +: tail
}
- }
+ //}

val s6 = s5.copy(conservedPcs = conservedPcsStack, recordPcs = s.recordPcs)

@@ -328,7 +328,7 @@
val (sm, smValueDef) =
quantifiedChunkSupporter.singletonSnapshotMap(s5, wand, args, MagicWandSnapshot(freshSnapRoot, snap), v4)
v4.decider.prover.comment("Definitional axioms for singleton-SM's value")
- v4.decider.assume(smValueDef)
+ v4.decider.assumeDefinition(smValueDef)
val ch = quantifiedChunkSupporter.createSingletonQuantifiedChunk(formalVars, wand, args, FullPerm, sm, s.program)
appendToResults(s5, ch, v4.decider.pcs.after(preMark), v4)
Success()
2 changes: 1 addition & 1 deletion src/main/scala/rules/MoreCompleteExhaleSupporter.scala
@@ -114,7 +114,7 @@ object moreCompleteExhaleSupporter extends SymbolicExecutionRules {
}
val (s1, taggedSnap, snapDefs, permSum) = summariseOnly(s, relevantChunks, resource, args, v)

- v.decider.assume(And(snapDefs))
+ v.decider.assumeDefinition(And(snapDefs))
// v.decider.assume(PermAtMost(permSum, FullPerm())) /* Done in StateConsolidator instead */

val s2 =
2 changes: 1 addition & 1 deletion src/main/scala/rules/PredicateSupporter.scala
@@ -85,7 +85,7 @@ object predicateSupporter extends PredicateSupportRules {
val (sm, smValueDef) =
quantifiedChunkSupporter.singletonSnapshotMap(s2, predicate, tArgs, predSnap, v1)
v1.decider.prover.comment("Definitional axioms for singleton-SM's value")
- v1.decider.assume(smValueDef)
+ v1.decider.assumeDefinition(smValueDef)
val ch =
quantifiedChunkSupporter.createSingletonQuantifiedChunk(
formalArgs, predicate, tArgs, tPerm, sm, s.program)
2 changes: 1 addition & 1 deletion src/main/scala/rules/Producer.scala
@@ -368,7 +368,7 @@ object producer extends ProductionRules {
quantifiedChunkSupporter.singletonSnapshotMap(s1, wand, args, sf(sorts.Snap, v1), v1)
v1.decider.prover.comment("Definitional axioms for singleton-SM's value")
val definitionalAxiomMark = v1.decider.setPathConditionMark()
- v1.decider.assume(smValueDef)
+ v1.decider.assumeDefinition(smValueDef)
val conservedPcs =
if (s1.recordPcs) (s1.conservedPcs.head :+ v1.decider.pcs.after(definitionalAxiomMark)) +: s1.conservedPcs.tail
else s1.conservedPcs
2 changes: 1 addition & 1 deletion src/main/scala/rules/QuantifiedChunkSupport.scala
@@ -967,7 +967,7 @@ object quantifiedChunkSupporter extends QuantifiedChunkSupport {
val (sm, smValueDef) = quantifiedChunkSupporter.singletonSnapshotMap(s, resource, tArgs, tSnap, v)
v.decider.prover.comment("Definitional axioms for singleton-SM's value")
val definitionalAxiomMark = v.decider.setPathConditionMark()
- v.decider.assume(smValueDef)
+ v.decider.assumeDefinition(smValueDef)
val conservedPcs =
if (s.recordPcs) (s.conservedPcs.head :+ v.decider.pcs.after(definitionalAxiomMark)) +: s.conservedPcs.tail
else s.conservedPcs
2 changes: 1 addition & 1 deletion src/main/scala/state/Chunks.scala
@@ -33,7 +33,7 @@ object BasicMaskHeapChunk {
val newVar = d.fresh(t.sort)
val constraint = newVar === t
newFr = newFr.recordArp(newVar, constraint)
- d.assume(constraint)
+ d.assumeDefinition(constraint)
newVar
}()
(new BasicMaskHeapChunk(resourceID, resource, cleanMask, heap), newFr)
