From 0cb28796b22f75b09227aaf01cad247f7d30bf1d Mon Sep 17 00:00:00 2001 From: James Tobler <64625414+j-tobler@users.noreply.github.com> Date: Mon, 12 May 2025 15:26:03 +1000 Subject: [PATCH 01/30] init branch --- src/main/scala/util/Runner.scala | 3 +++ 1 file changed, 3 insertions(+) create mode 100644 src/main/scala/util/Runner.scala diff --git a/src/main/scala/util/Runner.scala b/src/main/scala/util/Runner.scala new file mode 100644 index 0000000000..488d8e9bfb --- /dev/null +++ b/src/main/scala/util/Runner.scala @@ -0,0 +1,3 @@ +package main.scala.util + + From fd89355af79d98811e43c075fd2e4063b062a9a2 Mon Sep 17 00:00:00 2001 From: James Tobler <64625414+j-tobler@users.noreply.github.com> Date: Tue, 27 May 2025 10:38:42 +1000 Subject: [PATCH 02/30] implemented classes representing transforms, static analyses, and memoized sets of analyses --- src/main/scala/util/Runner.scala | 79 +++++++++++++++++++++++++++++++- 1 file changed, 78 insertions(+), 1 deletion(-) diff --git a/src/main/scala/util/Runner.scala b/src/main/scala/util/Runner.scala index 488d8e9bfb..1f77a29882 100644 --- a/src/main/scala/util/Runner.scala +++ b/src/main/scala/util/Runner.scala @@ -1,3 +1,80 @@ -package main.scala.util +package util +import ir.* +trait Transform[AS <: AnalysisSet](val name: String) { + val t = PerformanceTimer(name) + + protected def implementation: (IRContext, AS) => Unit + + def apply(ctx: IRContext, analyses: AS) = { + t.checkPoint("start") + implementation(ctx, analyses) + t.checkPoint("end") + analyses.invalidate() + } +} + +trait StaticAnalysis(val name: String) { + type ReturnType + + val t = PerformanceTimer(name) + + protected def implementation: Program => ReturnType + + def apply(prog: Program): ReturnType = { + t.checkPoint("start") + implementation(prog) + t.checkPoint("end") + } +} + +class AnalysisSet(program: Program) { + + private class Memoizer(analysis: StaticAnalysis, program: Program) { + + private var memo: Option[analysis.ReturnType] = None + private var preserved: Boolean = false + + def invalidate() = { + if (preserved) { + preserved = false + } else { + memo = None + } + } + + def apply(): analysis.ReturnType = { + if (memo.isEmpty) { + memo = Some(analysis(program)) + } + memo.get + } + + def preserve() = { preserved = true } + } + + private var memoizers: List[Memoizer[?]] = Nil + + def register(analysis: StaticAnalysis): Memoizer = { + val mem = Memoizer(analysis, program) + memoizers ::= mem + return mem + } + + def invalidate() = { memoizers.foreach(_.invalidate()) } +} + +class ExampleAnalysisSet(p: Program) extends AnalysisSet(p) { + val exampleAnalysis1 = register(ExampleAnalysis()) + val exampleAnalysis2 = register(ExampleAnalysis()) + val exampleAnalysis3 = register(ExampleAnalysis()) +} + +def example() = { + val p = Program() + val exampleAnalysisSet = ExampleAnalysisSet(p) + val result = exampleAnalysisSet.exampleAnalysis1() + exampleAnalysisSet.exampleAnalysis1.preserve() + exampleAnalysisSet.invalidate() +} From 6aba2ff2b4a1720d7df9cbd7cd3fe533b95dae74 Mon Sep 17 00:00:00 2001 From: James Tobler <64625414+j-tobler@users.noreply.github.com> Date: Fri, 30 May 2025 14:00:17 +1000 Subject: [PATCH 03/30] implemented Transform classes for doCleanup, prepareForTranslation, generateProcedureSummaries and generateRelyGuaranteeConditions; added documentation to Transform, StaticAnalysis and AnalysisManager classes; started on transforms for doSimplify --- src/main/scala/ir/transforms/Simp.scala | 239 ++++-- .../StripUnreachableFunctions.scala | 83 +- 
src/main/scala/util/Runner.scala | 716 +++++++++++++++++- 3 files changed, 889 insertions(+), 149 deletions(-) diff --git a/src/main/scala/ir/transforms/Simp.scala b/src/main/scala/ir/transforms/Simp.scala index 91b311873f..2fd7169c42 100644 --- a/src/main/scala/ir/transforms/Simp.scala +++ b/src/main/scala/ir/transforms/Simp.scala @@ -791,55 +791,79 @@ def coalesceBlocksCrossBranchDependency(p: Program): Boolean = { candidate.nonEmpty } -def coalesceBlocks(proc: Procedure): Boolean = { - var didAny = false - val blocks = proc.blocks.toList - for (b <- blocks.sortBy(_.rpoOrder)) { - if ( - b.prevBlocks.size == 1 && b.prevBlocks.head.statements.nonEmpty && b.statements.nonEmpty - && b.prevBlocks.head.nextBlocks.size == 1 - && b.prevBlocks.head.statements.lastOption.forall(s => !s.isInstanceOf[Call]) - && !(b.parent.entryBlock.contains(b) || b.parent.returnBlock.contains(b)) - && b.atomicSection.isEmpty && b.prevBlocks.forall(_.atomicSection.isEmpty) - ) { - didAny = true - // append topredecessor - // we know prevBlock is only jumping to b and has no call at the end - val prevBlock = b.prevBlocks.head - val stmts = b.statements.map(b.statements.remove).toList - prevBlock.statements.appendAll(stmts) - // leave empty block b and cleanup with removeEmptyBlocks - } else if ( - b.nextBlocks.size == 1 && b.nextBlocks.head.statements.nonEmpty && b.statements.nonEmpty - && b.nextBlocks.head.prevBlocks.size == 1 - && b.statements.lastOption.forall(s => !s.isInstanceOf[Call]) - && !(b.parent.entryBlock.contains(b) || b.parent.returnBlock.contains(b)) - && b.atomicSection.isEmpty && b.nextBlocks.forall(_.atomicSection.isEmpty) - ) { - didAny = true - // append to successor - // we know b is only jumping to nextBlock and does not end in a call - val nextBlock = b.nextBlocks.head - val stmts = b.statements.map(b.statements.remove).toList - nextBlock.statements.prependAll(stmts) - // leave empty block b and cleanup with removeEmptyBlocks - } else if (b.jump.isInstanceOf[Unreachable] && b.statements.isEmpty && b.prevBlocks.size == 1) { - b.prevBlocks.head.replaceJump(Unreachable()) - b.parent.removeBlocks(b) + +class CoalesceBlocks extends Transform("CoalesceBlocks") { + protected def coalesceBlocks(proc: Procedure): Boolean = { + var didAny = false + + val blocks = proc.blocks.toList + for (b <- blocks.sortBy(_.rpoOrder)) { + if ( + b.prevBlocks.size == 1 && b.prevBlocks.head.statements.nonEmpty && b.statements.nonEmpty + && b.prevBlocks.head.nextBlocks.size == 1 + && b.prevBlocks.head.statements.lastOption.forall(s => !s.isInstanceOf[Call]) + && !(b.parent.entryBlock.contains(b) || b.parent.returnBlock.contains(b)) + && b.atomicSection.isEmpty && b.prevBlocks.forall(_.atomicSection.isEmpty) + ) { + didAny = true + // append topredecessor + // we know prevBlock is only jumping to b and has no call at the end + val prevBlock = b.prevBlocks.head + val stmts = b.statements.map(b.statements.remove).toList + prevBlock.statements.appendAll(stmts) + // leave empty block b and cleanup with removeEmptyBlocks + } else if ( + b.nextBlocks.size == 1 && b.nextBlocks.head.statements.nonEmpty && b.statements.nonEmpty + && b.nextBlocks.head.prevBlocks.size == 1 + && b.statements.lastOption.forall(s => !s.isInstanceOf[Call]) + && !(b.parent.entryBlock.contains(b) || b.parent.returnBlock.contains(b)) + && b.atomicSection.isEmpty && b.nextBlocks.forall(_.atomicSection.isEmpty) + ) { + didAny = true + // append to successor + // we know b is only jumping to nextBlock and does not end in a call + val nextBlock = 
b.nextBlocks.head + val stmts = b.statements.map(b.statements.remove).toList + nextBlock.statements.prependAll(stmts) + // leave empty block b and cleanup with removeEmptyBlocks + } else if (b.jump.isInstanceOf[Unreachable] && b.statements.isEmpty && b.prevBlocks.size == 1) { + b.prevBlocks.head.replaceJump(Unreachable()) + b.parent.removeBlocks(b) + } + } + didAny + } + + protected def coalesceBlocks(p: Program): Boolean = { + var didAny = false + for (proc <- p.procedures) { + didAny = didAny || coalesceBlocks(proc) } + didAny + } + + def implementation(ctx: IRContext, analyses: AnalysisManager): Set[analyses.Memoizer] = { + coalesceBlocks(ctx.program) + Set.empty } - didAny } -def coalesceBlocks(p: Program): Boolean = { - var didAny = false - for (proc <- p.procedures) { - didAny = didAny || coalesceBlocks(proc) +class CoalesceBlocksFixpoint extends CoalesceBlocks { + override val name: String = "CoalesceBlocksFixpoint" + + override def implementation(ctx: IRContext, analyses: AnalysisManager): Set[analyses.Memoizer] = { + // useful for ReplaceReturns + // (pushes single block with `Unreachable` into its predecessor) + while (coalesceBlocks(ctx.program)) {} + Set.empty } - didAny } + + + + def removeDeadInParams(p: Program): Boolean = { var modified = false assert(invariant.correctCalls(p)) @@ -1108,41 +1132,49 @@ def copyPropParamFixedPoint(p: Program, rela: Map[BigInt, BigInt]): Int = { iterations } -def reversePostOrder(p: Procedure): Unit = { - /* Procedures may contain disconnected sets of blocks so we arbitrarily order these with respect to eachother. */ - for (b <- p.blocks) { - b.rpoOrder = -1 - } - var left = p.entryBlock.map(reversePostOrder(_)).getOrElse(0) + 1 - for (b <- p.blocks.filter(_.rpoOrder == -1)) { - left = reversePostOrder(b, true, left) + 1 + +class ApplyRpo extends Transform("ApplyRpo") { + def reversePostOrder(p: Procedure): Unit = { + /* Procedures may contain disconnected sets of blocks so we arbitrarily order these with respect to eachother. 
*/ + for (b <- p.blocks) { + b.rpoOrder = -1 + } + var left = p.entryBlock.map(reversePostOrder(_)).getOrElse(0) + 1 + for (b <- p.blocks.filter(_.rpoOrder == -1)) { + left = reversePostOrder(b, true, left) + 1 + } } -} -def reversePostOrder(startBlock: Block, fixup: Boolean = false, begin: Int = 0): Int = { - var count = begin - val seen = mutable.HashSet[Block]() + private def reversePostOrder(startBlock: Block, fixup: Boolean = false, begin: Int = 0): Int = { + var count = begin + val seen = mutable.HashSet[Block]() - def walk(b: Block): Unit = { - seen += b - for (s <- b.nextBlocks) { - if (!seen.contains(s)) { - walk(s) + def walk(b: Block): Unit = { + seen += b + for (s <- b.nextBlocks) { + if (!seen.contains(s)) { + walk(s) + } } + if (!fixup || b.rpoOrder < count) { + b.rpoOrder = count + } + count += 1 } - if (!fixup || b.rpoOrder < count) { - b.rpoOrder = count - } - count += 1 + + walk(startBlock) + count } - walk(startBlock) - count -} + private def applyRPO(p: Program) = { + for (proc <- p.procedures) { + reversePostOrder(proc) + } + } -def applyRPO(p: Program) = { - for (proc <- p.procedures) { - reversePostOrder(proc) + def implementation(ctx: IRContext, analyses: AnalysisManager): Set[analyses.Memoizer] = { + applyRPO(ctx.program) + Set.empty } } @@ -1594,7 +1626,7 @@ class DefinitelyExits(knownExit: Set[Procedure]) extends ProcedureSummaryGenerat } } -def findDefinitelyExits(p: Program) = { +def findDefinitelyExits(p: Program): ProcReturnInfo = { val exit = p.procedures.filter(p => p.procName == "exit").toSet val dom = DefinitelyExits(exit) val ldom = ProcExitsDomain(x => false) @@ -1610,6 +1642,50 @@ def findDefinitelyExits(p: Program) = { ) } +// todo: not sure whether to make 'findDefinitelyExits' a private method of this class +class ReplaceJumpsInNonReturningProcs extends Transform("ReplaceJumpsInNonReturningProcs") { + def implementation(ctx: IRContext, analyses: AnalysisManager): Set[analyses.Memoizer] = { + val nonReturning = findDefinitelyExits(ctx.program) + ctx.program.mainProcedure.foreach { + case d: DirectCall if nonReturning.nonreturning.contains(d.target) => d.parent.replaceJump(Return()) + case _ => + } + Set.empty + } +} + +// todo: i have no idea what to do with this +class ReplaceReturnsTransform(doSimplify: Boolean) extends Transform("ReplaceReturns") { + def implementation(ctx: IRContext, analyses: AnalysisManager): Set[analyses.Memoizer] = { + // FIXME: Main will often maintain the stack by loading R30 from the caller's stack frame + // before returning, which makes the R30 assertin faile. 
Hence we currently skip this + // assertion for main, instead we should precondition the stack layout before main + // but the interaction between spec and memory regions is nontrivial currently + cilvisitor.visit_prog( + transforms.ReplaceReturns(proc => doSimplify && ctx.program.mainProcedure != proc), + ctx.program + ) + transforms.addReturnBlocks(ctx.program, insertR30InvariantAssertion = _ => doSimplify) + cilvisitor.visit_prog(transforms.ConvertSingleReturn(), ctx.program) + Set.empty + } +} + +class RemoveExternalFunctionReferences extends Transform("RemoveExternalFunctionReferences") { + def implementation(ctx: IRContext, analyses: AnalysisManager): Set[analyses.Memoizer] = { + val externalNames = ctx.externalFunctions.map(_.name) + val unqualifiedNames = externalNames.filter(_.contains('@')).map(_.split('@')(0)) + ExternalRemover(externalNames ++ unqualifiedNames).visitProgram(ctx.program) + for (p <- ctx.program.procedures) { + p.isExternal = Some( + ctx.externalFunctions.exists(e => e.name == p.procName || p.address.contains(e.offset)) || p.isExternal + .getOrElse(false) + ) + } + Set.empty + } +} + class Simplify(val res: Boolean => Variable => Option[Expr], val initialBlock: Block = null) extends CILVisitor { var madeAnyChange = false @@ -1819,14 +1895,21 @@ def removeTriviallyDeadBranches(p: Program, removeAllUnreachableBlocks: Boolean dead.nonEmpty } -// ensure procedure entry has no incoming jumps, if it does replace with new -// block jumping to the old procedure entry -def makeProcEntryNonLoop(p: Procedure) = { - if (p.entryBlock.exists(_.prevBlocks.nonEmpty)) { - val nb = Block(p.name + "_entry") - p.addBlock(nb) - val eb = p.entryBlock.get - nb.replaceJump(GoTo(eb)) - p.entryBlock = nb +class MakeProcEntriesNonLoops extends Transform("MakeProcEntriesNonLoops") { + // ensure procedure entry has no incoming jumps, if it does replace with new + // block jumping to the old procedure entry + private def makeProcEntryNonLoop(p: Procedure) = { + if (p.entryBlock.exists(_.prevBlocks.nonEmpty)) { + val nb = Block(p.name + "_entry") + p.addBlock(nb) + val eb = p.entryBlock.get + nb.replaceJump(GoTo(eb)) + p.entryBlock = nb + } + } + + def implementation(ctx: IRContext, analyses: AnalysisManager): Set[analyses.Memoizer] = { + ctx.program.procedures.foreach(makeProcEntryNonLoop) + Set.empty } } diff --git a/src/main/scala/ir/transforms/StripUnreachableFunctions.scala b/src/main/scala/ir/transforms/StripUnreachableFunctions.scala index f4a984c191..01acfe3adf 100644 --- a/src/main/scala/ir/transforms/StripUnreachableFunctions.scala +++ b/src/main/scala/ir/transforms/StripUnreachableFunctions.scala @@ -5,47 +5,68 @@ import util.Logger import ir.cilvisitor.* // This shouldn't be run before indirect calls are resolved -def stripUnreachableFunctions(p: Program, depth: Int = Int.MaxValue): Unit = { - val procedureCalleeNames = p.procedures.map(f => f -> f.calls).toMap +class StripUnreachableFunctions(config: BASILConfig) extends Transform("StripUnreachableFunctions") { + private val before: Int - val toVisit: mutable.LinkedHashSet[(Int, Procedure)] = mutable.LinkedHashSet((0, p.mainProcedure)) - var reachableFound = true - val reachableNames = mutable.HashMap[Procedure, Int]() - while (toVisit.nonEmpty) { - val next = toVisit.head - toVisit.remove(next) + private def stripUnreachableFunctions(p: Program, depth: Int = Int.MaxValue): Unit = { + val procedureCalleeNames = p.procedures.map(f => f -> f.calls).toMap - if (next._1 <= depth) { + val toVisit: mutable.LinkedHashSet[(Int, 
Procedure)] = mutable.LinkedHashSet((0, p.mainProcedure)) + var reachableFound = true + val reachableNames = mutable.HashMap[Procedure, Int]() + while (toVisit.nonEmpty) { + val next = toVisit.head + toVisit.remove(next) - def addName(depth: Int, name: Procedure): Unit = { - val oldDepth = reachableNames.getOrElse(name, Integer.MAX_VALUE) - reachableNames.put(next._2, if depth < oldDepth then depth else oldDepth) + if (next._1 <= depth) { + + def addName(depth: Int, name: Procedure): Unit = { + val oldDepth = reachableNames.getOrElse(name, Integer.MAX_VALUE) + reachableNames.put(next._2, if depth < oldDepth then depth else oldDepth) + } + addName(next._1, next._2) + + val callees = procedureCalleeNames(next._2) + + toVisit.addAll(callees.diff(reachableNames.keySet).map(c => (next._1 + 1, c))) + callees.foreach(c => addName(next._1 + 1, c)) } - addName(next._1, next._2) + } + assert(invariant.cfgCorrect(p)) + val removed = p.procedures.filterNot(f => reachableNames.keySet.contains(f)).toSet + // p.procedures = p.procedures.filter(f => reachableNames.keySet.contains(f.name)) + for (proc <- removed) { + p.removeProcedure(proc) + } - val callees = procedureCalleeNames(next._2) + for (elem <- p.procedures.filter(c => c.calls.exists(s => removed.contains(s)))) { + // last layer is analysed only as specifications so we remove the body for anything that calls + // a function we have removed + + elem.clearBlocks() + assert(elem.entryBlock.isEmpty) + assert(elem.returnBlock.isEmpty) - toVisit.addAll(callees.diff(reachableNames.keySet).map(c => (next._1 + 1, c))) - callees.foreach(c => addName(next._1 + 1, c)) } + assert(invariant.blocksUniqueToEachProcedure(p)) + assert(invariant.cfgCorrect(p)) } - assert(invariant.cfgCorrect(p)) - val removed = p.procedures.filterNot(f => reachableNames.keySet.contains(f)).toSet - // p.procedures = p.procedures.filter(f => reachableNames.keySet.contains(f.name)) - for (proc <- removed) { - p.removeProcedure(proc) - } - - for (elem <- p.procedures.filter(c => c.calls.exists(s => removed.contains(s)))) { - // last layer is analysed only as specifications so we remove the body for anything that calls - // a function we have removed - elem.clearBlocks() - assert(elem.entryBlock.isEmpty) - assert(elem.returnBlock.isEmpty) + def implementation(ctx: IRContext, analyses: AnalysisManager): Set[analyses.Memoizer] = { + stripUnreachableFunctions(ctx.program, config.loading.procedureTrimDepth) + Set.empty + } + override protected def preRun(ctx: IRContext): Unit = { + Logger.info("[!] Stripping unreachable") + before = ctx.program.procedures.size } - assert(invariant.blocksUniqueToEachProcedure(p)) - assert(invariant.cfgCorrect(p)) + override protected def postRun(ctx: IRContext): Unit = { + Logger.info( + s"[!] Removed ${before - ctx.program.procedures.size} functions (${ctx.program.procedures.size} remaining)" + ) + val dupProcNames = ctx.program.procedures.groupBy(_.name).filter((_, p) => p.size > 1).toList.flatMap(_(1)) + assert(dupProcNames.isEmpty) + } } diff --git a/src/main/scala/util/Runner.scala b/src/main/scala/util/Runner.scala index 1f77a29882..9d90b58ae4 100644 --- a/src/main/scala/util/Runner.scala +++ b/src/main/scala/util/Runner.scala @@ -1,80 +1,716 @@ package util import ir.* +import ir.transforms.* +import analysis.* -trait Transform[AS <: AnalysisSet](val name: String) { +// --- Base Classes ---------------------------------------------------------------------------------------------------- + +/** Provides a consistent interface for IR transforms. 
+ * + * @param name The name of this transform. + */ +trait Transform(val name: String) { + + // the performance of each transform is implicitly tested val t = PerformanceTimer(name) - protected def implementation: (IRContext, AS) => Unit + // code to run before the transform implementation, such as logging information + protected def preRun(ctx: IRContext): Unit = {} + + // code to run after the transform implementation, such as logging information or assertions + protected def postRun(ctx: IRContext): Unit = {} + + /** Override this method to implement the logic for your transform. + * + * @param ctx The IR to be modified in-place. + * @param analyses Use this to access the results of static analyses. Any results not yet generated will be produced + * automatically and then cached in the manager for later retrieval. + * + * @return The set of analyses that are *preserved* after the transform. To clear all analyses after the transform is + * invoked, return Set.empty. (Note that this will negatively impact performance.) To preserve all analyses, return + * analyses.getAll(). + */ + protected def implementation: (ctx: IRContext, analyses: AnalysisManager) => Set[analyses.Memoizer] - def apply(ctx: IRContext, analyses: AS) = { + // instances of transforms can be directly called to invoke this method + def apply(ctx: IRContext, analyses: AnalysisManager): Unit = { + if (analyses.program ne ctx.program) { + // the program we are transforming should be the same one for which the analysis results were produced + throw new RuntimeException(s"Transform $name was passed an AnalysisManager of an IR Program with a different " + + s"reference value than the program being transformed.") + } + preRun(ctx) t.checkPoint("start") - implementation(ctx, analyses) + val toPreserve = implementation(ctx, analyses) t.checkPoint("end") - analyses.invalidate() + postRun(ctx) + analyses.invalidateAllExcept(toPreserve) } } -trait StaticAnalysis(val name: String) { - type ReturnType +/** A transform can be a sequence of other transforms. We prefer using this over constructing transforms in the + * implementations of other transforms. + * + * @param name The name of this transform batch. + * @param transforms The sequence of other transforms that comprise this transform. + */ +trait TransformBatch(name: String, transforms: List[Transform]) extends Transform(name) { + def implementation(ctx: IRContext, analyses: AnalysisManager): Set[analyses.Memoizer] = { + // simply apply each transform in-turn + transforms.foreach(_(ctx, analyses)) + Set.empty + } +} +/** Provides a consistent interface for static analyses. + * Similar to Transform, but returns a result rather than modifying the IR in-place. + * + * @tparam ReturnType The type of the result that this analysis generates. + * @param name The name of this analysis. 
+ */ +trait StaticAnalysis[ReturnType](val name: String) { + val t = PerformanceTimer(name) - protected def implementation: Program => ReturnType + protected def preRun(): Unit = {} - def apply(prog: Program): ReturnType = { + protected def postRun(): Unit = {} + + protected def implementation: (Program, AnalysisManager) => ReturnType + + def apply(prog: Program, analyses: AnalysisManager): ReturnType = { + if (analyses.program ne prog) { + throw new RuntimeException(s"Analysis $name was passed an AnalysisManager of an IR Program with a different " + + s"reference value than the program being transformed.") + } + preRun() t.checkPoint("start") - implementation(prog) + val ret = implementation(prog, analyses) t.checkPoint("end") + postRun(ret) + ret } } -class AnalysisSet(program: Program) { +// + +/** Analysis manager for caching and invalidating analysis results. + * + * @param program Each analysis manager is defined with respect to a particular program reference, which is always + * passed to the static analyses invoked via this manager. This ensures that the cached analysis results all relate to + * the same program reference. It is then the responsibility of Transforms to clear these results when they are + * invalidated by a modification to this program. + */ +class AnalysisManager(program: Program) { + + // memoizer to wrap static analyses and cache their results + private class Memoizer[ReturnType](analysis: StaticAnalysis[ReturnType]) { + + private var memo: Option[ReturnType] = None - private class Memoizer(analysis: StaticAnalysis, program: Program) { + def invalidate() = { memo = None } - private var memo: Option[analysis.ReturnType] = None - private var preserved: Boolean = false + // allows this memoizer to be called like a function + def apply(): ReturnType = { + // pass this analysis manager and its associated program to the static analysis + if memo.isEmpty then memo = Some(analysis(program, AnalysisManager.this)) + memo.get + } + } + + // keep track of all memoizers to ensure we can invalidate all of them + private val memoizers: Set[Memoizer[?]] = Nil + + // private helper function for creating and storing memoizers + private def register[ReturnType](analysis: StaticAnalysis[ReturnType]): Memoizer[ReturnType] = { + val mem = Memoizer[ReturnType](analysis) + memoizers ::= mem + return mem + } + + // list of memoizers - these can be directly called via this manager, e.g. val result = manager.exampleAnalysis() + val intraProcConstProp = register(IntraProcConstantPropagationAnalysis()) + val interProcConstProp = register(InterProcConstantPropagationAnalysis()) + val memoryRegionResult = register(MemoryRegionAnalysisSolverAnalysis()) + val vsaResult = register(ValueSetAnalysisSolverAnalysis()) + val interLiveVarsResults = register(/* todo */) + val paramResults = register(/* todo */) + val steensgaardSolver = register(/* todo */) // fixme: merge these into one analysis result? 
+ val steensgaardPointsTo = register(/* todo */) + val steensgaardCallSiteSummary = register(/* todo */) + val mmmResults = register(/* todo */) + val reachingDefs = register(/* todo */) + val regionInjector = register(/* todo */) + val symbolicAddresses = register(/* todo */) + val localDSA = register(/* todo */) + val bottomUpDSA = register(/* todo */) + val topDownDSA = register(/* todo */) + val writesToResult = register(/* todo */) + val ssaResults = register(/* todo */) + val graResult = register(/* todo */) + val intraDomain = register(/* todo */) + val interDomain = register(/* todo */) + + // clears the cached results of all analyses except for those in the given set + def invalidateAllExcept(exceptions: Set[Memoizer]): Unit = + memoizers.filterNot(exceptions.contains).foreach(_.invalidate()) + + // useful to pass to 'invalidateAllExcept' when we want to preserve all or nearly all results after a transform + def getAll(): Set[Memoizer[?]] = memoizers // safe to directly return non-mutable set +} - def invalidate() = { - if (preserved) { - preserved = false - } else { - memo = None +// --- DoCleanup Transform --------------------------------------------------------------------------------------------- + +/** Initial cleanup before analysis. + */ +class DoCleanup(doSimplify: Boolean = false) extends TransformBatch("DoCleanup", List( + MakeProcEntriesNonLoops(), + CoalesceBlocksFixpoint(), + ApplyRpo(), + ReplaceJumpsInNonReturningProcs(), + ReplaceReturnsTransform(doSimplify), + RemoveExternalFunctionReferences() +)) { + override protected def preRun(): Unit = { + Logger.info("[!] Removing external function calls") + } + + override protected def postRun(): Unit = { + assert(invariant.singleCallBlockEnd(ctx.program)) + assert(invariant.cfgCorrect(ctx.program)) + assert(invariant.blocksUniqueToEachProcedure(ctx.program)) + assert(invariant.procEntryNoIncoming(ctx.program)) + } +} + +// --- PrepareForTranslation Transform --------------------------------------------------------------------------------- + +/** Cull unneccessary information that does not need to be included in the translation, and infer stack regions, and + * add in modifies from the spec. 
+ */ +class PrepareForTranslation(config: BASILConfig) extends TransformBatch("PrepareForTranslation", List( + DetermineRelevantMemory(config), + StripUnreachableFunctions(config), + StackSubstitution(config), + SetModifies(), + RenameBoogieKeywords() +)) { + override protected def postRun(ctx: IRContext): Unit = { + // check all blocks with an atomic section exist within the same procedure + val visited = mutable.Set[Block]() + for (p <- ctx.program.procedures) { + for (b <- p.blocks) { + if (!visited.contains(b)) { + if (b.atomicSection.isDefined) { + b.atomicSection.get.getBlocks.foreach { a => assert(a.parent == p) } + visited.addAll(b.atomicSection.get.getBlocks) + } + visited.addOne(b) + } } } + } +} + +// todo: not sure where to put this +class DetermineRelevantMemory(config: BASILConfig) extends Transform("DetermineRelevantMemory") { + def implementation(ctx: IRContext, analyses: AnalysisManager): Set[analyses.Memoizer] = { + if (config.staticAnalysis.isEmpty || (config.staticAnalysis.get.memoryRegions == MemoryRegionsMode.Disabled)) { + ctx.program.determineRelevantMemory(ctx.globalOffsets) + } + Set.empty + } +} + +// todo: not sure where to put this +class StackSubstitution(config: BASILConfig) extends Transform("StackSubstitution") { + def implementation(ctx: IRContext, analyses: AnalysisManager): Set[analyses.Memoizer] = { + if (!config.memoryTransform && + (config.staticAnalysis.isEmpty || (config.staticAnalysis.get.memoryRegions == MemoryRegionsMode.Disabled))) { + StackSubstituter().visitProgram(ctx.program) + } + Set.empty + } +} + +// todo: not sure where to put this +class SetModifies extends Transform("SetModifies") { + def implementation(ctx: IRContext, analyses: AnalysisManager): Set[analyses.Memoizer] = { + val specModifies = ctx.specification.subroutines.map(s => s.name -> s.modifies).toMap + ctx.program.setModifies(specModifies) + Set.empty + } +} + +// todo: not sure where to put this +class RenameBoogieKeywords extends Transform("RenameBoogieKeywords") { + def implementation(ctx: IRContext, analyses: AnalysisManager): Set[analyses.Memoizer] = { + Renamer(boogieReserved).visitProgram(ctx.program) + Set.empty + } + + override protected def postRun(ctx: IRContext): Unit = { + assert(invariant.singleCallBlockEnd(ctx.program)) + } +} + +// --- GenerateProcedureSummaries -------------------------------------------------------------------------------------- + +class GenerateProcedureSummaries(simplified: Boolean = false) extends Transform("GenerateProcedureSummaries") { + // (?) removed the 'modified' variable that we used to return from this function + // (?) 
removed the 'IRProgram' parameter - using ctx.program instead + def implementation(ctx: IRContext, analyses: AnalysisManager): Set[analyses.Memoizer] = { + val prog = ctx.program + // Need to know modifies clauses to generate summaries, but this is probably out of place (fixme) + val specModifies = ctx.specification.subroutines.map(s => s.name -> s.modifies).toMap + prog.setModifies(specModifies) + + val summaryGenerator = SummaryGenerator(prog, simplified) + for procedure <- prog.procedures if procedure != prog.mainProcedure do + procedure.requires = summaryGenerator.generateRequires(procedure) + procedure.ensures = summaryGenerator.generateEnsures(procedure) + + Set.empty + } +} + +// --- GenerateRelyGuaranteeConditions --------------------------------------------------------------------------------- + +class GenerateRgConditions(threads: List[Procedure]) extends Transform("GenerateRgConditions") { + def implementation(ctx: IRContext, analyses: AnalysisManager): Set[analyses.Memoizer] = { + /* Todo: For the moment we are printing these to stdout, but in future we'd + like to add them to the IR. */ + type StateLatticeElement = LatticeMap[Variable, analysis.Interval] + type InterferenceLatticeElement = Map[Variable, StateLatticeElement] + val stateLattice = IntervalLatticeExtension() + val stateTransfer = SignedIntervalDomain().transfer + val intDom = ConditionalWritesDomain[StateLatticeElement](stateLattice, stateTransfer) + val relyGuarantees = + RelyGuaranteeGenerator[InterferenceLatticeElement, StateLatticeElement](intDom).generate(threads) + for ((p, (rely, guar)) <- relyGuarantees) { + StaticAnalysisLogger.info("--- " + p.procName + " " + "-" * 50 + "\n") + StaticAnalysisLogger.info("Rely:") + StaticAnalysisLogger.info(intDom.toString(rely) + "\n") + StaticAnalysisLogger.info("Guarantee:") + StaticAnalysisLogger.info(intDom.toString(guar) + "\n") + } + Set.empty + } +} + +// --- DoSimplify Transform -------------------------------------------------------------------------------------------- + +/* + +class DoSimplify(config: BASILConfig) extends TransformBatch("DoSimplify", List( + // (?) removed logger command: Logger.info("[!] 
Running Simplify") + IdentifyLoops(), + NormaliseBlockNames(), + SortProcedures(), + LiftSvComp(), + DumpIlBeforeSimp(config), + ApplyRpo(), + RemoveEmptyBlocks(), + CoalesceBlocks(), + RemoveEmptyBlocks(), + LogBlockgraphBeforeDsa(config), + LogIrBeforeDsa(config), + OnePassDsa(), + InlinePLTLaunchpad(), + RemoveEmptyBlocks(), + LogBlockgraphAfterDsa(config), + LogIrAfterDsa(config), + LogDsaResultsAndDoChecks(), + LogIrBeforeCopyProp(), + LogBlockGraphBeforeCopyProp(), + CopyProp(), + FixUpGuards(), + RemoveDuplicateGuards(), + LogBlockGraphAfterSimp(), + LiftLinuxAssertFail(), + LogIrAfterCopyProp(), + DsaCheck(), + LogIrAfterSlices(), + LogSimplificationValidation() +)) + +class IdentifyLoops extends Transform("IdentifyLoops") { + def implementation(ctx: IRContext, analyses: AnalysisManager): Set[analyses.Memoizer] = { + val foundLoops = LoopDetector.identify_loops(ctx.program) + val newLoops = foundLoops.reducibleTransformIR() + newLoops.updateIrWithLoops() + Set.empty + } +} + +class NormaliseBlockNames extends Transform("NormaliseBlockNames") { + def implementation(ctx: IRContext, analyses: AnalysisManager): Set[analyses.Memoizer] = { + ctx.program.procedures.foreach(_.normaliseBlockNames()) + Set.empty + } +} - def apply(): analysis.ReturnType = { - if (memo.isEmpty) { - memo = Some(analysis(program)) +class SortProcedures extends Transform("SortProcedures") { + def implementation(ctx: IRContext, analyses: AnalysisManager): Set[analyses.Memoizer] = { + ctx.program.sortProceduresRPO() + Set.empty + } +} + +class LiftSvComp extends Transform("LiftSvComp") { + def implementation(ctx: IRContext, analyses: AnalysisManager): Set[analyses.Memoizer] = { + transforms.liftSVComp(ctx.program) + Set.empty + } +} + +// ??? +class DumpIlBeforeSimp(config: BASILConfig) extends Transform("DumpIlBeforeSimp") { + def implementation(ctx: IRContext, analyses: AnalysisManager): Set[analyses.Memoizer] = { + config.foreach { + _.dumpILToPath.foreach { s => + DebugDumpIRLogger.writeToFile(File(s"${s}_il-before-simp.il"), pp_prog(ctx.program)) } - memo.get } + Set.empty + } +} - def preserve() = { preserved = true } +class RemoveEmptyBlocks extends Transform("RemoveEmptyBlocks") { + def implementation(ctx: IRContext, analyses: AnalysisManager): Set[analyses.Memoizer] = { + transforms.removeEmptyBlocks(ctx.program) + Set.empty } +} - private var memoizers: List[Memoizer[?]] = Nil +class LogBlockgraphBeforeDsa(config: BASILConfig) extends Transform("LogBlockgraphBeforeDsa") { + def implementation(ctx: IRContext, analyses: AnalysisManager): Set[analyses.Memoizer] = { + config.foreach { + _.analysisDotPath.foreach { s => + DebugDumpIRLogger.writeToFile(File(s"${s}_blockgraph-before-dsa.dot"), dotBlockGraph(ctx.program.mainProcedure)) + } + } + Set.empty + } +} - def register(analysis: StaticAnalysis): Memoizer = { - val mem = Memoizer(analysis, program) - memoizers ::= mem - return mem +class LogIrBeforeDsa(config: BASILConfig) extends Transform("LogIrBeforeDsa") { + def implementation(ctx: IRContext, analyses: AnalysisManager): Set[analyses.Memoizer] = { + config.foreach { + _.dumpILToPath.foreach { s => + DebugDumpIRLogger.writeToFile(File(s"${s}_il-before-dsa.il"), pp_prog(ctx.program)) + } + } + Set.empty + } +} + +class LogBlockgraphAfterDsa(config: BASILConfig) extends Transform("LogBlockgraphAfterDsa") { + def implementation(ctx: IRContext, analyses: AnalysisManager): Set[analyses.Memoizer] = { + config.foreach { + _.analysisDotPath.foreach { s => + AnalysisResultDotLogger.writeToFile( + 
File(s"${s}_blockgraph-after-dsa.dot"), + dotBlockGraph( + program, + (program.collect { case b: Block => + b -> pp_block(b) + }).toMap + ) + ) + } + } + Set.empty + } +} + +class LogIrAfterDsa(config: BASILConfig) extends Transform("LogIrAfterDsa") { + def implementation(ctx: IRContext, analyses: AnalysisManager): Set[analyses.Memoizer] = { + config.foreach { + _.dumpILToPath.foreach { s => + DebugDumpIRLogger.writeToFile(File(s"${s}_il-after-dsa.il"), pp_prog(program)) + } + } + Set.empty + } +} + +class OnePassDsa extends Transform("OnePassDsa") { + def implementation(ctx: IRContext, analyses: AnalysisManager): Set[analyses.Memoizer] = { + transforms.OnePassDSA().applyTransform(ctx.program) + Set.empty + } +} + +class InlinePLTLaunchpad extends Transform("InlinePLTLaunchpad") { + def implementation(ctx: IRContext, analyses: AnalysisManager): Set[analyses.Memoizer] = { + transforms.inlinePLTLaunchpad(ctx.program) + Set.empty + } +} + +class LogDsaResultsAndDoChecks extends Transform("LogDsaResultsAndDoChecks") { + def implementation(ctx: IRContext, analyses: AnalysisManager): Set[analyses.Memoizer] = { + if (ir.eval.SimplifyValidation.validate) { + Logger.info("DSA no uninitialised") + assert(invariant.allVariablesAssignedIndex(ctx.program)) + // Logger.info("Live vars difftest") + // val tipLiveVars : Map[CFGPosition, Set[Variable]] = analysis.IntraLiveVarsAnalysis(program).analyze() + // assert(program.procedures.forall(transforms.difftestLiveVars(_, tipLiveVars))) + + Logger.info("DSA Check") + val x = program.procedures.forall(transforms.rdDSAProperty) + assert(x) + Logger.info("DSA Check passed") + assert(invariant.singleCallBlockEnd(program)) + assert(invariant.cfgCorrect(program)) + assert(invariant.blocksUniqueToEachProcedure(program)) + } + Set.empty + } +} + +class LogIrBeforeCopyProp extends Transform("LogIrBeforeCopyProp") { + def implementation(ctx: IRContext, analyses: AnalysisManager): Set[analyses.Memoizer] = { + config.foreach { + _.dumpILToPath.foreach { s => + DebugDumpIRLogger.writeToFile(File(s"${s}_il-before-copyprop.il"), pp_prog(ctx.program)) + } + } + Set.empty + } +} + +class LogBlockGraphBeforeCopyProp extends Transform("LogBlockGraphBeforeCopyProp") { + def implementation(ctx: IRContext, analyses: AnalysisManager): Set[analyses.Memoizer] = { + config.foreach { + _.analysisDotPath.foreach { s => + AnalysisResultDotLogger.writeToFile( + File(s"${s}_blockgraph-before-copyprop.dot"), + dotBlockGraph(ctx.program.mainProcedure) + ) + } + } + Set.empty + } +} + +class CopyProp extends Transform("CopyProp") { + def implementation(ctx: IRContext, analyses: AnalysisManager): Set[analyses.Memoizer] = { + Logger.info("Copyprop Start") + transforms.copyPropParamFixedPoint(ctx.program, ctx.globalOffsets) + Set.empty + } +} + +class FixUpGuards extends Transforms("FixUpGuards") { + def implementation(ctx: IRContext, analyses: AnalysisManager): Set[analyses.Memoizer] = { + transforms.fixupGuards(ctx.program) + Set.empty + } +} + +class RemoveDuplicateGuards extends Transforms("RemoveDuplicateGuards") { + def implementation(ctx: IRContext, analyses: AnalysisManager): Set[analyses.Memoizer] = { + transforms.removeDuplicateGuard(ctx.program) + Set.empty + } +} + +class LogBlockGraphAfterSimp extends Transform("LogBlockGraphAfterSimp") { + def implementation(ctx: IRContext, analyses: AnalysisManager): Set[analyses.Memoizer] = { + config.foreach { + _.analysisDotPath.foreach { s => + AnalysisResultDotLogger.writeToFile( + File(s"${s}_blockgraph-after-simp.dot"), + 
dotBlockGraph(program.mainProcedure) + ) + } + } + Set.empty + } +} + +class LiftLinuxAssertFail extends Transform("LogBlockGraphAfterSimp") { + def implementation(ctx: IRContext, analyses: AnalysisManager): Set[analyses.Memoizer] = { + transforms.liftLinuxAssertFail(ctx) + assert(invariant.blockUniqueLabels(ctx.program)) // ??? should this be here? + // (?) removed command: Logger.info(s"CopyProp ${timer.checkPoint("Simplify")} ms ") + Set.empty + } +} + +class LogIrAfterCopyProp extends Transform("LogBlockGraphAfterSimp") { + def implementation(ctx: IRContext, analyses: AnalysisManager): Set[analyses.Memoizer] = { + config.foreach { + _.dumpILToPath.foreach { s => + DebugDumpIRLogger.writeToFile(File(s"${s}_il-after-copyprop.il"), pp_prog(program)) + } + } + Set.empty + } +} + +class DsaCheck extends Transform("LogBlockGraphAfterSimp") { + def implementation(ctx: IRContext, analyses: AnalysisManager): Set[analyses.Memoizer] = { + if (ir.eval.SimplifyValidation.validate) { + Logger.info("DSA Check (after transform)") + val x = program.procedures.forall(transforms.rdDSAProperty) + assert(x) + Logger.info("DSA Check succeeded") + } + Set.empty + } +} + +class LogIrAfterSlices extends Transform("LogBlockGraphAfterSimp") { + def implementation(ctx: IRContext, analyses: AnalysisManager): Set[analyses.Memoizer] = { + config.foreach { + _.dumpILToPath.foreach { s => + DebugDumpIRLogger.writeToFile(File(s"${s}_il-after-slices.il"), pp_prog(program)) + } + } + Set.empty + } +} + +class LogSimplificationValidation extends Transform("LogSimplificationValidation") { + def implementation(ctx: IRContext, analyses: AnalysisManager): Set[analyses.Memoizer] = { + if (ir.eval.SimplifyValidation.validate) { + Logger.info("[!] Simplify :: Writing simplification validation") + val w = BufferedWriter(FileWriter("rewrites.smt2")) + ir.eval.SimplifyValidation.makeValidation(w) + w.close() + } + // (?) removed command: Logger.info("[!] 
Simplify :: finished") + Set.empty } +} + + +// --- Static Analyses ------------------------------------------------------------------------------------------------- + +class IntraProcConstantPropagationAnalysis extends StaticAnalysis[ + Map[CFGPosition, Map[Variable, FlatElement[BitVecLiteral]]] +]("IntraProcConstantPropagation") { + def implementation(prog: Program, analyses: AnalysisManager) = IntraProcConstantPropagation(prog).analyze() +} + +class InterProcConstantPropagationAnalysis extends StaticAnalysis[ + Map[CFGPosition, Map[Variable, FlatElement[BitVecLiteral]]] +]("InterProcConstantPropagation") { + def implementation(prog: Program, analyses: AnalysisManager) = InterProcConstantPropagation(prog).analyze() +} + +class MemoryRegionAnalysisSolverAnalysis extends StaticAnalysis[ + Map[CFGPosition, ((Set[StackRegion], Set[Variable]), Set[HeapRegion])] +]("MemoryRegionAnalysis") { + def implementation(prog: Program, analyses: AnalysisManager) = + MemoryRegionAnalysisSolver( + prog, + analyses.intraDomain(), // computeDomain(IntraProcIRCursor, prog.procedures).toSet, + analyses.interProcConstProp(), + analyses.reachingDefs(), + analyses.graResult(), + analyses.mmmResults(), + analyses.vsaResult() + ).analyze() +} - def invalidate() = { memoizers.foreach(_.invalidate()) } +class ValueSetAnalysisSolverAnalysis extends StaticAnalysis[ + Map[CFGPosition, LiftedElement[Map[Variable | MemoryRegion, Set[Value]]]] +]("VsaAnalysis") { + def implementation(prog: Program) = + ValueSetAnalysisSolverAnalysis(prog, analyses.mmmResults()).analyze() } -class ExampleAnalysisSet(p: Program) extends AnalysisSet(p) { - val exampleAnalysis1 = register(ExampleAnalysis()) - val exampleAnalysis2 = register(ExampleAnalysis()) - val exampleAnalysis3 = register(ExampleAnalysis()) +class InterLiveVarsAnalysis extends StaticAnalysis[ + Map[CFGPosition, Map[Variable, TwoElement]], +]("InterLiveVarsAnalysis") { + def implementation(prog: Program, analyses: AnalysisManager) = InterLiveVarsAnalysis(prog).analyze() } -def example() = { - val p = Program() - val exampleAnalysisSet = ExampleAnalysisSet(p) - val result = exampleAnalysisSet.exampleAnalysis1() - exampleAnalysisSet.exampleAnalysis1.preserve() - exampleAnalysisSet.invalidate() +class ParamResultsAnalysis extends StaticAnalysis[ + Map[Procedure, Set[Variable]] +]("ParamResultsAnalysis") { + def implementation(prog: Program, analyses: AnalysisManager) = ParamAnalysis(prog).analyze() } + +class SteensgaardGetSolver extends StaticAnalysis[ + Map[RegisterWrapperEqualSets, Set[RegisterWrapperEqualSets | MemoryRegion]] +]("SteensgaardGetSolver") { + def implementation(prog: Program, analyses: AnalysisManager) = + val solver = InterprocSteensgaardAnalysis( + analyses.interDomain(), // computeDomain(InterProcIRCursor, prog.procedures) + analyses.mmmResults(), + analyses.ssaResults() + ) + solver.analyze() + solver // ??? 
+} + +class SteensgaardGetPointsTo extends StaticAnalysis[ + Map[RegisterWrapperEqualSets, Set[RegisterWrapperEqualSets | MemoryRegion]] +]("SteensgaardGetPointsTo") { + def implementation(prog: Program, analyses: AnalysisManager) = analyses.steensgaardSolver().pointsTo() +} + +class SteensgaardGetCallSiteSummary extends StaticAnalysis[ + mutable.Map[DirectCall, Map[RegisterWrapperEqualSets, Set[RegisterWrapperEqualSets | MemoryRegion]]] +]("SteensgaardGetCallSiteSummary") { + def implementation(prog: Program, analyses: AnalysisManager) = analyses.steensgaardSolver().callSiteSummary +} + +class MmmResults extends StaticAnalysis[ + MemoryModelMap +]("MmmResults") { + def implementation(prog: Program, analyses: AnalysisManager) = + val mmm = MemoryModelMap( + globalOffsets, + mergedSubroutines, + globalAddresses, + globalSizes + ) + mmm.preLoadGlobals() + mmm.setCallSiteSummaries(steensgaardSolver.callSiteSummary) + mmm + +} + +class reachingDefs extends StaticAnalysis("ReachingDefs") { + ReturnType = Map[CFGPosition, (Map[Variable, Set[Assign]], Map[Variable, Set[Assign]])] + def implementation(prog: Program, analyses: AnalysisManager) = _(prog).analyze() +} + +class regionInjector extends StaticAnalysis[Option[RegionInjector]]("RegionInjector") { + def implementation(prog: Program, analyses: AnalysisManager) = _(prog).analyze() +} + +class symbolicAddresses extends StaticAnalysis[Map[CFGPosition, Map[SymbolicAddress, TwoElement]]]("SymbolicAddresses") { + def implementation(prog: Program, analyses: AnalysisManager) = _(prog).analyze() +} + +class localDSA extends StaticAnalysis[Map[Procedure, Graph]]("LocalDSA") { + def implementation(prog: Program, analyses: AnalysisManager) = _(prog).analyze() +} + +class bottomUpDSA extends StaticAnalysis[Map[Procedure, Graph]]("BottomUpDSA") { + def implementation(prog: Program, analyses: AnalysisManager) = _(prog).analyze() +} + +class topDownDSA extends StaticAnalysis[Map[Procedure, Graph]]("TopDownDSA") { + def implementation(prog: Program, analyses: AnalysisManager) = _(prog).analyze() +} + +class writesToResult extends StaticAnalysis[Map[Procedure, Set[Register]]]("WritesTo") { + def implementation(prog: Program, analyses: AnalysisManager) = _(prog).analyze() +} + +class ssaResults extends StaticAnalysis[ + Map[CFGPosition, (Map[Variable, FlatElement[Int]], Map[Variable, FlatElement[Int]])] +]("SSA") { + def implementation(prog: Program, analyses: AnalysisManager) = _(prog).analyze() +} + +*/ From 5b01bea7c98f21d1949ad87fca76e4dfb5b30c76 Mon Sep 17 00:00:00 2001 From: James Tobler <64625414+j-tobler@users.noreply.github.com> Date: Fri, 30 May 2025 15:54:16 +1000 Subject: [PATCH 04/30] created new files for analyses and transforms; moved transforms to the appropriate locations --- src/main/scala/analysis/AnalysisManager.scala | 68 ++++ src/main/scala/analysis/StaticAnalysis.scala | 33 ++ .../StripUnreachableFunctions.scala | 10 + src/main/scala/ir/transforms/Transform.scala | 62 ++++ src/main/scala/util/RunUtils.scala | 258 +++++++-------- src/main/scala/util/Runner.scala | 297 ------------------ 6 files changed, 291 insertions(+), 437 deletions(-) create mode 100644 src/main/scala/analysis/AnalysisManager.scala create mode 100644 src/main/scala/analysis/StaticAnalysis.scala create mode 100644 src/main/scala/ir/transforms/Transform.scala diff --git a/src/main/scala/analysis/AnalysisManager.scala b/src/main/scala/analysis/AnalysisManager.scala new file mode 100644 index 0000000000..c46523a690 --- /dev/null +++ 
b/src/main/scala/analysis/AnalysisManager.scala
@@ -0,0 +1,68 @@
+package analysis
+
+import ir.Program
+
+/** Analysis manager for caching and invalidating analysis results.
+ *
+ * @param program Each analysis manager is defined with respect to a particular program reference, which is always
+ * passed to the static analyses invoked via this manager. This ensures that the cached analysis results all relate to
+ * the same program reference. It is then the responsibility of Transforms to clear these results when they are
+ * invalidated by a modification to this program.
+ */
+class AnalysisManager(val program: Program) {
+
+  // memoizer to wrap static analyses and cache their results
+  class Memoizer[ReturnType](analysis: StaticAnalysis[ReturnType]) {
+
+    private var memo: Option[ReturnType] = None
+
+    def invalidate() = { memo = None }
+
+    // allows this memoizer to be called like a function
+    def apply(): ReturnType = {
+      // pass this analysis manager and its associated program to the static analysis
+      if memo.isEmpty then memo = Some(analysis(program, AnalysisManager.this))
+      memo.get
+    }
+  }
+
+  // keep track of all memoizers to ensure we can invalidate all of them
+  private var memoizers: Set[Memoizer[?]] = Set.empty
+
+  // private helper function for creating and storing memoizers
+  private def register[ReturnType](analysis: StaticAnalysis[ReturnType]): Memoizer[ReturnType] = {
+    val mem = Memoizer[ReturnType](analysis)
+    memoizers += mem
+    return mem
+  }
+
+  // list of memoizers - these can be directly called via this manager, e.g. val result = manager.exampleAnalysis()
+  val intraProcConstProp = register(IntraProcConstantPropagationAnalysis())
+  val interProcConstProp = register(InterProcConstantPropagationAnalysis())
+  val memoryRegionResult = register(MemoryRegionAnalysisSolverAnalysis())
+  val vsaResult = register(ValueSetAnalysisSolverAnalysis())
+  val interLiveVarsResults = register(/* todo */)
+  val paramResults = register(/* todo */)
+  val steensgaardSolver = register(/* todo */) // fixme: merge these into one analysis result?
+  val steensgaardPointsTo = register(/* todo */)
+  val steensgaardCallSiteSummary = register(/* todo */)
+  val mmmResults = register(/* todo */)
+  val reachingDefs = register(/* todo */)
+  val regionInjector = register(/* todo */)
+  val symbolicAddresses = register(/* todo */)
+  val localDSA = register(/* todo */)
+  val bottomUpDSA = register(/* todo */)
+  val topDownDSA = register(/* todo */)
+  val writesToResult = register(/* todo */)
+  val ssaResults = register(/* todo */)
+  val graResult = register(/* todo */)
+  val intraDomain = register(/* todo */)
+  val interDomain = register(/* todo */)
+
+  // clears the cached results of all analyses except for those in the given set
+  def invalidateAllExcept(exceptions: Set[Memoizer[?]]): Unit =
+    memoizers.filterNot(exceptions.contains).foreach(_.invalidate())
+
+  // useful to pass to 'invalidateAllExcept' when we want to preserve all or nearly all results after a transform
+  def getAll(): Set[Memoizer[?]] = memoizers // safe to directly return non-mutable set
+}
diff --git a/src/main/scala/analysis/StaticAnalysis.scala b/src/main/scala/analysis/StaticAnalysis.scala
new file mode 100644
index 0000000000..7b1d6a9c42
--- /dev/null
+++ b/src/main/scala/analysis/StaticAnalysis.scala
@@ -0,0 +1,33 @@
+package analysis
+
+import ir.Program
+
+/** Provides a consistent interface for static analyses.
+ * Similar to Transform, but returns a result rather than modifying the IR in-place.
+ *
+ * @tparam ReturnType The type of the result that this analysis generates.
+ * @param name The name of this analysis.
+ */
+trait StaticAnalysis[ReturnType](val name: String) {
+
+  val t = PerformanceTimer(name)
+
+  protected def preRun(): Unit = {}
+
+  protected def postRun(result: ReturnType): Unit = {}
+
+  protected def implementation: (Program, AnalysisManager) => ReturnType
+
+  def apply(prog: Program, analyses: AnalysisManager): ReturnType = {
+    if (analyses.program ne prog) {
+      throw new RuntimeException(s"Analysis $name was passed an AnalysisManager of an IR Program with a different " +
+        s"reference value than the program being transformed.")
+    }
+    preRun()
+    t.checkPoint("start")
+    val ret = implementation(prog, analyses)
+    t.checkPoint("end")
+    postRun(ret)
+    ret
+  }
+}
diff --git a/src/main/scala/ir/transforms/StripUnreachableFunctions.scala b/src/main/scala/ir/transforms/StripUnreachableFunctions.scala
index 01acfe3adf..7ec9ff168b 100644
--- a/src/main/scala/ir/transforms/StripUnreachableFunctions.scala
+++ b/src/main/scala/ir/transforms/StripUnreachableFunctions.scala
@@ -68,5 +68,15 @@ class StripUnreachableFunctions(config: BASILConfig) extends Transform("StripUnr
     )
     val dupProcNames = ctx.program.procedures.groupBy(_.name).filter((_, p) => p.size > 1).toList.flatMap(_(1))
     assert(dupProcNames.isEmpty)
+
+    ctx.program.procedures.foreach(p =>
+      p.blocks.foreach(b => {
+        b.jump match {
+          case GoTo(targs, _) if targs.isEmpty =>
+            Logger.warn(s"block ${b.label} in subroutine ${p.name} has no outgoing edges")
+          case _ => ()
+        }
+      })
+    )
   }
 }
diff --git a/src/main/scala/ir/transforms/Transform.scala b/src/main/scala/ir/transforms/Transform.scala
new file mode 100644
index 0000000000..ec0e4e32db
--- /dev/null
+++ b/src/main/scala/ir/transforms/Transform.scala
@@ -0,0 +1,62 @@
+package ir.transforms
+
+import util.IRContext
+import ir.Program
+import analysis.AnalysisManager
+
+/** Provides a consistent interface for IR transforms.
+ *
+ * @param name The name of this transform.
+ */
+trait Transform(val name: String) {
+
+  // the performance of each transform is implicitly tested
+  val t = PerformanceTimer(name)
+
+  // code to run before the transform implementation, such as logging information
+  protected def preRun(ctx: IRContext): Unit = {}
+
+  // code to run after the transform implementation, such as logging information or assertions
+  protected def postRun(ctx: IRContext): Unit = {}
+
+  /** Override this method to implement the logic for your transform.
+   *
+   * @param ctx The IR to be modified in-place.
+   * @param analyses Use this to access the results of static analyses. Any results not yet generated will be produced
+   * automatically and then cached in the manager for later retrieval.
+   *
+   * @return The set of analyses that are *preserved* after the transform. To clear all analyses after the transform is
+   * invoked, return Set.empty. (Note that this will negatively impact performance.) To preserve all analyses, return
+   * analyses.getAll().
+ */ + protected def implementation: (ctx: IRContext, analyses: AnalysisManager) => Set[analyses.Memoizer] + + // instances of transforms can be directly called to invoke this method + def apply(ctx: IRContext, analyses: AnalysisManager): Unit = { + if (analyses.program ne ctx.program) { + // the program we are transforming should be the same one for which the analysis results were produced + throw new RuntimeException(s"Transform $name was passed an AnalysisManager of an IR Program with a different " + + s"reference value than the program being transformed.") + } + preRun(ctx) + t.checkPoint("start") + val toPreserve = implementation(ctx, analyses) + t.checkPoint("end") + postRun(ctx) + analyses.invalidateAllExcept(toPreserve) + } +} + +/** A transform can be a sequence of other transforms. We prefer using this over constructing transforms in the + * implementations of other transforms. + * + * @param name The name of this transform batch. + * @param transforms The sequence of other transforms that comprise this transform. + */ +trait TransformBatch(name: String, transforms: List[Transform]) extends Transform(name) { + def implementation(ctx: IRContext, analyses: AnalysisManager): Set[analyses.Memoizer] = { + // simply apply each transform in-turn + transforms.foreach(_(ctx, analyses)) + Set.empty + } +} diff --git a/src/main/scala/util/RunUtils.scala b/src/main/scala/util/RunUtils.scala index e1a32eda29..7568ebb9ee 100644 --- a/src/main/scala/util/RunUtils.scala +++ b/src/main/scala/util/RunUtils.scala @@ -248,160 +248,141 @@ object IRTransform { val boogieReserved: Set[String] = Set("free") /** Initial cleanup before analysis. - */ - def doCleanup(ctx: IRContext, doSimplify: Boolean = false): IRContext = { - Logger.info("[!] Removing external function calls") - // Remove external function references (e.g. @printf) - val externalNames = ctx.externalFunctions.map(e => e.name) - val externalNamesLibRemoved = mutable.Set[String]() - externalNamesLibRemoved.addAll(externalNames) - - for (e <- externalNames) { - if (e.contains('@')) { - externalNamesLibRemoved.add(e.split('@')(0)) - } - } - - ctx.program.procedures.foreach(ir.transforms.makeProcEntryNonLoop) - - // useful for ReplaceReturns - // (pushes single block with `Unreachable` into its predecessor) - while (transforms.coalesceBlocks(ctx.program)) {} - - transforms.applyRPO(ctx.program) - val nonReturning = transforms.findDefinitelyExits(ctx.program) - ctx.program.mainProcedure.foreach { - case d: DirectCall if nonReturning.nonreturning.contains(d.target) => d.parent.replaceJump(Return()) - case _ => + */ + class DoCleanup(doSimplify: Boolean = false) extends TransformBatch("DoCleanup", List( + MakeProcEntriesNonLoops(), + CoalesceBlocksFixpoint(), + ApplyRpo(), + ReplaceJumpsInNonReturningProcs(), + ReplaceReturnsTransform(doSimplify), + RemoveExternalFunctionReferences() + )) { + override protected def preRun(): Unit = { + Logger.info("[!] Removing external function calls") // fixme: seems odd? } - // FIXME: Main will often maintain the stack by loading R30 from the caller's stack frame - // before returning, which makes the R30 assertin faile. 
Hence we currently skip this - // assertion for main, instead we should precondition the stack layout before main - // but the interaction between spec and memory regions is nontrivial currently - cilvisitor.visit_prog( - transforms.ReplaceReturns(proc => doSimplify && ctx.program.mainProcedure != proc), - ctx.program - ) - - transforms.addReturnBlocks(ctx.program, insertR30InvariantAssertion = _ => doSimplify) - cilvisitor.visit_prog(transforms.ConvertSingleReturn(), ctx.program) - - val externalRemover = ExternalRemover(externalNamesLibRemoved.toSet) - externalRemover.visitProgram(ctx.program) - for (p <- ctx.program.procedures) { - p.isExternal = Some( - ctx.externalFunctions.exists(e => e.name == p.procName || p.address.contains(e.offset)) || p.isExternal - .getOrElse(false) - ) + override protected def postRun(): Unit = { + assert(invariant.singleCallBlockEnd(ctx.program)) + assert(invariant.cfgCorrect(ctx.program)) + assert(invariant.blocksUniqueToEachProcedure(ctx.program)) + assert(invariant.procEntryNoIncoming(ctx.program)) } - - assert(invariant.singleCallBlockEnd(ctx.program)) - assert(invariant.cfgCorrect(ctx.program)) - assert(invariant.blocksUniqueToEachProcedure(ctx.program)) - assert(invariant.procEntryNoIncoming(ctx.program)) - ctx } /** Cull unneccessary information that does not need to be included in the translation, and infer stack regions, and * add in modifies from the spec. */ - def prepareForTranslation(config: BASILConfig, ctx: IRContext): Unit = { - if (config.staticAnalysis.isEmpty || (config.staticAnalysis.get.memoryRegions == MemoryRegionsMode.Disabled)) { - ctx.program.determineRelevantMemory(ctx.globalOffsets) + class PrepareForTranslation(config: BASILConfig) extends TransformBatch("PrepareForTranslation", List( + DetermineRelevantMemory(config), + StripUnreachableFunctions(config), + StackSubstitution(config), + SetModifies(), + RenameBoogieKeywords() + )) { + override protected def postRun(ctx: IRContext): Unit = { + // check all blocks with an atomic section exist within the same procedure + val visited = mutable.Set[Block]() + for (p <- ctx.program.procedures) { + for (b <- p.blocks) { + if (!visited.contains(b)) { + if (b.atomicSection.isDefined) { + b.atomicSection.get.getBlocks.foreach { a => assert(a.parent == p) } + visited.addAll(b.atomicSection.get.getBlocks) + } + visited.addOne(b) + } + } + } } - - Logger.info("[!] Stripping unreachable") - val before = ctx.program.procedures.size - transforms.stripUnreachableFunctions(ctx.program, config.loading.procedureTrimDepth) - Logger.info( - s"[!] 
Removed ${before - ctx.program.procedures.size} functions (${ctx.program.procedures.size} remaining)" - ) - val dupProcNames = ctx.program.procedures.groupBy(_.name).filter((_, p) => p.size > 1).toList.flatMap(_(1)) - assert(dupProcNames.isEmpty) - - ctx.program.procedures.foreach(p => - p.blocks.foreach(b => { - b.jump match { - case GoTo(targs, _) if targs.isEmpty => - Logger.warn(s"block ${b.label} in subroutine ${p.name} has no outgoing edges") - case _ => () + + // todo: not sure where to put this + class DetermineRelevantMemory(config: BASILConfig) extends Transform("DetermineRelevantMemory") { + def implementation(ctx: IRContext, analyses: AnalysisManager): Set[analyses.Memoizer] = { + if (config.staticAnalysis.isEmpty || (config.staticAnalysis.get.memoryRegions == MemoryRegionsMode.Disabled)) { + ctx.program.determineRelevantMemory(ctx.globalOffsets) } - }) - ) - - if ( - !config.memoryTransform && (config.staticAnalysis.isEmpty || (config.staticAnalysis.get.memoryRegions == MemoryRegionsMode.Disabled)) - ) { - val stackIdentification = StackSubstituter() - stackIdentification.visitProgram(ctx.program) + Set.empty + } } - val specModifies = ctx.specification.subroutines.map(s => s.name -> s.modifies).toMap - ctx.program.setModifies(specModifies) - - val renamer = Renamer(boogieReserved) - renamer.visitProgram(ctx.program) - - assert(invariant.singleCallBlockEnd(ctx.program)) - - // check all blocks with an atomic section exist within the same procedure - val visited = mutable.Set[Block]() - for (p <- ctx.program.procedures) { - for (b <- p.blocks) { - if (!visited.contains(b)) { - if (b.atomicSection.isDefined) { - b.atomicSection.get.getBlocks.foreach { a => assert(a.parent == p) } - visited.addAll(b.atomicSection.get.getBlocks) - } - visited.addOne(b) + // todo: not sure where to put this + class StackSubstitution(config: BASILConfig) extends Transform("StackSubstitution") { + def implementation(ctx: IRContext, analyses: AnalysisManager): Set[analyses.Memoizer] = { + if (!config.memoryTransform && + (config.staticAnalysis.isEmpty || (config.staticAnalysis.get.memoryRegions == MemoryRegionsMode.Disabled))) { + StackSubstituter().visitProgram(ctx.program) } + Set.empty } } - } - def generateProcedureSummaries(ctx: IRContext, IRProgram: Program, simplified: Boolean = false): Boolean = { - var modified = false - // Need to know modifies clauses to generate summaries, but this is probably out of place - val specModifies = ctx.specification.subroutines.map(s => s.name -> s.modifies).toMap - ctx.program.setModifies(specModifies) + // todo: not sure where to put this + class SetModifies extends Transform("SetModifies") { + def implementation(ctx: IRContext, analyses: AnalysisManager): Set[analyses.Memoizer] = { + val specModifies = ctx.specification.subroutines.map(s => s.name -> s.modifies).toMap + ctx.program.setModifies(specModifies) + Set.empty + } + } - val summaryGenerator = SummaryGenerator(IRProgram, simplified) - IRProgram.procedures - .filter { p => - p != IRProgram.mainProcedure + // todo: not sure where to put this + class RenameBoogieKeywords extends Transform("RenameBoogieKeywords") { + def implementation(ctx: IRContext, analyses: AnalysisManager): Set[analyses.Memoizer] = { + Renamer(boogieReserved).visitProgram(ctx.program) + Set.empty } - .foreach { procedure => - { - val req = summaryGenerator.generateRequires(procedure) - modified = modified | procedure.requires != req - procedure.requires = req - - val ens = summaryGenerator.generateEnsures(procedure) - modified = 
modified | procedure.ensures != ens - procedure.ensures = ens - } + + override protected def postRun(ctx: IRContext): Unit = { + assert(invariant.singleCallBlockEnd(ctx.program)) } + } + } + + class GenerateProcedureSummaries(simplified: Boolean = false) extends Transform("GenerateProcedureSummaries") { + // (?) removed the 'modified' variable that we used to return from this function + // (?) removed the 'IRProgram' parameter - using ctx.program instead + def implementation(ctx: IRContext, analyses: AnalysisManager): Set[analyses.Memoizer] = { + val prog = ctx.program + // Need to know modifies clauses to generate summaries, but this is probably out of place (fixme) + val specModifies = ctx.specification.subroutines.map(s => s.name -> s.modifies).toMap + prog.setModifies(specModifies) + + val summaryGenerator = SummaryGenerator(prog, simplified) + for procedure <- prog.procedures if procedure != prog.mainProcedure do + procedure.requires = summaryGenerator.generateRequires(procedure) + procedure.ensures = summaryGenerator.generateEnsures(procedure) + + Set.empty + } - modified + override protected def preRun(ctx: IRContext): Unit = { + StaticAnalysisLogger.info("[!] Generating Procedure Summaries") + } } - def generateRelyGuaranteeConditions(threads: List[Procedure]): Unit = { - /* Todo: For the moment we are printing these to stdout, but in future we'd - like to add them to the IR. */ - type StateLatticeElement = LatticeMap[Variable, analysis.Interval] - type InterferenceLatticeElement = Map[Variable, StateLatticeElement] - val stateLattice = IntervalLatticeExtension() - val stateTransfer = SignedIntervalDomain().transfer - val intDom = ConditionalWritesDomain[StateLatticeElement](stateLattice, stateTransfer) - val relyGuarantees = - RelyGuaranteeGenerator[InterferenceLatticeElement, StateLatticeElement](intDom).generate(threads) - for ((p, (rely, guar)) <- relyGuarantees) { - StaticAnalysisLogger.info("--- " + p.procName + " " + "-" * 50 + "\n") - StaticAnalysisLogger.info("Rely:") - StaticAnalysisLogger.info(intDom.toString(rely) + "\n") - StaticAnalysisLogger.info("Guarantee:") - StaticAnalysisLogger.info(intDom.toString(guar) + "\n") + class GenerateRgConditions(threads: List[Procedure]) extends Transform("GenerateRgConditions") { + def implementation(ctx: IRContext, analyses: AnalysisManager): Set[analyses.Memoizer] = { + /* Todo: For the moment we are printing these to stdout, but in future we'd + like to add them to the IR. */ + type StateLatticeElement = LatticeMap[Variable, analysis.Interval] + type InterferenceLatticeElement = Map[Variable, StateLatticeElement] + val stateLattice = IntervalLatticeExtension() + val stateTransfer = SignedIntervalDomain().transfer + val intDom = ConditionalWritesDomain[StateLatticeElement](stateLattice, stateTransfer) + val relyGuarantees = + RelyGuaranteeGenerator[InterferenceLatticeElement, StateLatticeElement](intDom).generate(threads) + for ((p, (rely, guar)) <- relyGuarantees) { + StaticAnalysisLogger.info("--- " + p.procName + " " + "-" * 50 + "\n") + StaticAnalysisLogger.info("Rely:") + StaticAnalysisLogger.info(intDom.toString(rely) + "\n") + StaticAnalysisLogger.info("Guarantee:") + StaticAnalysisLogger.info(intDom.toString(guar) + "\n") + } + Set.empty + } + + override protected def preRun(ctx: IRContext): Unit = { + StaticAnalysisLogger.info("[!] 
Generating Rely-Guarantee Conditions") } } } @@ -892,7 +873,7 @@ object RunUtils { assert(invariant.cfgCorrect(ctx.program)) assert(invariant.blocksUniqueToEachProcedure(ctx.program)) - ctx = IRTransform.doCleanup(ctx, conf.simplify) + IRTransform.DoCleanup(conf.simplify)(ctx, AnalysisManager(ctx.program)) transforms.inlinePLTLaunchpad(ctx.program) @@ -960,13 +941,11 @@ object RunUtils { } if (conf.summariseProcedures) { - StaticAnalysisLogger.info("[!] Generating Procedure Summaries") - IRTransform.generateProcedureSummaries(ctx, ctx.program, q.loading.parameterForm || conf.simplify) + IRTransform.GenerateProcedureSummaries(q.loading.parameterForm || conf.simplify)(ctx, AnalysisManager(ctx.program)) } if (conf.summariseProcedures) { - StaticAnalysisLogger.info("[!] Generating Procedure Summaries") - IRTransform.generateProcedureSummaries(ctx, ctx.program, q.loading.parameterForm || conf.simplify) + IRTransform.GenerateProcedureSummaries(q.loading.parameterForm || conf.simplify)(ctx, AnalysisManager(ctx.program)) } if (q.runInterpret) { @@ -999,11 +978,10 @@ object RunUtils { } } - IRTransform.prepareForTranslation(q, ctx) + IRTransform.PrepareForTranslation(q)(ctx, AnalysisManager(ctx.program)) if (conf.generateRelyGuarantees) { - StaticAnalysisLogger.info("[!] Generating Rely-Guarantee Conditions") - IRTransform.generateRelyGuaranteeConditions(ctx.program.procedures.toList.filter(p => p.returnBlock != None)) + IRTransform.GenerateRelyGuaranteeConditions(ctx.program.procedures.toList.filter(p => p.returnBlock != None))(ctx, AnalysisManager(ctx.program)) } q.loading.dumpIL.foreach(s => { diff --git a/src/main/scala/util/Runner.scala b/src/main/scala/util/Runner.scala index 9d90b58ae4..653a44a781 100644 --- a/src/main/scala/util/Runner.scala +++ b/src/main/scala/util/Runner.scala @@ -4,303 +4,6 @@ import ir.* import ir.transforms.* import analysis.* -// --- Base Classes ---------------------------------------------------------------------------------------------------- - -/** Provides a consistent interface for IR transforms. - * - * @param name The name of this transform. - */ -trait Transform(val name: String) { - - // the performance of each transform is implicitly tested - val t = PerformanceTimer(name) - - // code to run before the transform implementation, such as logging information - protected def preRun(ctx: IRContext): Unit = {} - - // code to run after the transform implementation, such as logging information or assertions - protected def postRun(ctx: IRContext): Unit = {} - - /** Override this method to implement the logic for your transform. - * - * @param ctx The IR to be modified in-place. - * @param analyses Use this to access the results of static analyses. Any results not yet generated will be produced - * automatically and then cached in the manager for later retrieval. - * - * @return The set of analyses that are *preserved* after the transform. To clear all analyses after the transform is - * invoked, return Set.empty. (Note that this will negatively impact performance.) To preserve all analyses, return - * analyses.getAll(). 
- */ - protected def implementation: (ctx: IRContext, analyses: AnalysisManager) => Set[analyses.Memoizer] - - // instances of transforms can be directly called to invoke this method - def apply(ctx: IRContext, analyses: AnalysisManager): Unit = { - if (analyses.program ne ctx.program) { - // the program we are transforming should be the same one for which the analysis results were produced - throw new RuntimeException(s"Transform $name was passed an AnalysisManager of an IR Program with a different " + - s"reference value than the program being transformed.") - } - preRun(ctx) - t.checkPoint("start") - val toPreserve = implementation(ctx, analyses) - t.checkPoint("end") - postRun(ctx) - analyses.invalidateAllExcept(toPreserve) - } -} - -/** A transform can be a sequence of other transforms. We prefer using this over constructing transforms in the - * implementations of other transforms. - * - * @param name The name of this transform batch. - * @param transforms The sequence of other transforms that comprise this transform. - */ -trait TransformBatch(name: String, transforms: List[Transform]) extends Transform(name) { - def implementation(ctx: IRContext, analyses: AnalysisManager): Set[analyses.Memoizer] = { - // simply apply each transform in-turn - transforms.foreach(_(ctx, analyses)) - Set.empty - } -} - -/** Provides a consistent interface for static analyses. - * Similar to Transform, but returns a result rather than modifying the IR in-place. - * - * @tparam ReturnType The type of the result that this analysis generates. - * @param name The name of this analysis. - */ -trait StaticAnalysis[ReturnType](val name: String) { - - val t = PerformanceTimer(name) - - protected def preRun(): Unit = {} - - protected def postRun(): Unit = {} - - protected def implementation: (Program, AnalysisManager) => ReturnType - - def apply(prog: Program, analyses: AnalysisManager): ReturnType = { - if (analyses.program ne prog) { - throw new RuntimeException(s"Analysis $name was passed an AnalysisManager of an IR Program with a different " + - s"reference value than the program being transformed.") - } - preRun() - t.checkPoint("start") - val ret = implementation(prog, analyses) - t.checkPoint("end") - postRun(ret) - ret - } -} - -// - -/** Analysis manager for caching and invalidating analysis results. - * - * @param program Each analysis manager is defined with respect to a particular program reference, which is always - * passed to the static analyses invoked via this manager. This ensures that the cached analysis results all relate to - * the same program reference. It is then the responsibility of Transforms to clear these results when they are - * invalidated by a modification to this program. 
- */ -class AnalysisManager(program: Program) { - - // memoizer to wrap static analyses and cache their results - private class Memoizer[ReturnType](analysis: StaticAnalysis[ReturnType]) { - - private var memo: Option[ReturnType] = None - - def invalidate() = { memo = None } - - // allows this memoizer to be called like a function - def apply(): ReturnType = { - // pass this analysis manager and its associated program to the static analysis - if memo.isEmpty then memo = Some(analysis(program, AnalysisManager.this)) - memo.get - } - } - - // keep track of all memoizers to ensure we can invalidate all of them - private val memoizers: Set[Memoizer[?]] = Nil - - // private helper function for creating and storing memoizers - private def register[ReturnType](analysis: StaticAnalysis[ReturnType]): Memoizer[ReturnType] = { - val mem = Memoizer[ReturnType](analysis) - memoizers ::= mem - return mem - } - - // list of memoizers - these can be directly called via this manager, e.g. val result = manager.exampleAnalysis() - val intraProcConstProp = register(IntraProcConstantPropagationAnalysis()) - val interProcConstProp = register(InterProcConstantPropagationAnalysis()) - val memoryRegionResult = register(MemoryRegionAnalysisSolverAnalysis()) - val vsaResult = register(ValueSetAnalysisSolverAnalysis()) - val interLiveVarsResults = register(/* todo */) - val paramResults = register(/* todo */) - val steensgaardSolver = register(/* todo */) // fixme: merge these into one analysis result? - val steensgaardPointsTo = register(/* todo */) - val steensgaardCallSiteSummary = register(/* todo */) - val mmmResults = register(/* todo */) - val reachingDefs = register(/* todo */) - val regionInjector = register(/* todo */) - val symbolicAddresses = register(/* todo */) - val localDSA = register(/* todo */) - val bottomUpDSA = register(/* todo */) - val topDownDSA = register(/* todo */) - val writesToResult = register(/* todo */) - val ssaResults = register(/* todo */) - val graResult = register(/* todo */) - val intraDomain = register(/* todo */) - val interDomain = register(/* todo */) - - // clears the cached results of all analyses except for those in the given set - def invalidateAllExcept(exceptions: Set[Memoizer]): Unit = - memoizers.filterNot(exceptions.contains).foreach(_.invalidate()) - - // useful to pass to 'invalidateAllExcept' when we want to preserve all or nearly all results after a transform - def getAll(): Set[Memoizer[?]] = memoizers // safe to directly return non-mutable set -} - -// --- DoCleanup Transform --------------------------------------------------------------------------------------------- - -/** Initial cleanup before analysis. - */ -class DoCleanup(doSimplify: Boolean = false) extends TransformBatch("DoCleanup", List( - MakeProcEntriesNonLoops(), - CoalesceBlocksFixpoint(), - ApplyRpo(), - ReplaceJumpsInNonReturningProcs(), - ReplaceReturnsTransform(doSimplify), - RemoveExternalFunctionReferences() -)) { - override protected def preRun(): Unit = { - Logger.info("[!] 
Removing external function calls") - } - - override protected def postRun(): Unit = { - assert(invariant.singleCallBlockEnd(ctx.program)) - assert(invariant.cfgCorrect(ctx.program)) - assert(invariant.blocksUniqueToEachProcedure(ctx.program)) - assert(invariant.procEntryNoIncoming(ctx.program)) - } -} - -// --- PrepareForTranslation Transform --------------------------------------------------------------------------------- - -/** Cull unneccessary information that does not need to be included in the translation, and infer stack regions, and - * add in modifies from the spec. - */ -class PrepareForTranslation(config: BASILConfig) extends TransformBatch("PrepareForTranslation", List( - DetermineRelevantMemory(config), - StripUnreachableFunctions(config), - StackSubstitution(config), - SetModifies(), - RenameBoogieKeywords() -)) { - override protected def postRun(ctx: IRContext): Unit = { - // check all blocks with an atomic section exist within the same procedure - val visited = mutable.Set[Block]() - for (p <- ctx.program.procedures) { - for (b <- p.blocks) { - if (!visited.contains(b)) { - if (b.atomicSection.isDefined) { - b.atomicSection.get.getBlocks.foreach { a => assert(a.parent == p) } - visited.addAll(b.atomicSection.get.getBlocks) - } - visited.addOne(b) - } - } - } - } -} - -// todo: not sure where to put this -class DetermineRelevantMemory(config: BASILConfig) extends Transform("DetermineRelevantMemory") { - def implementation(ctx: IRContext, analyses: AnalysisManager): Set[analyses.Memoizer] = { - if (config.staticAnalysis.isEmpty || (config.staticAnalysis.get.memoryRegions == MemoryRegionsMode.Disabled)) { - ctx.program.determineRelevantMemory(ctx.globalOffsets) - } - Set.empty - } -} - -// todo: not sure where to put this -class StackSubstitution(config: BASILConfig) extends Transform("StackSubstitution") { - def implementation(ctx: IRContext, analyses: AnalysisManager): Set[analyses.Memoizer] = { - if (!config.memoryTransform && - (config.staticAnalysis.isEmpty || (config.staticAnalysis.get.memoryRegions == MemoryRegionsMode.Disabled))) { - StackSubstituter().visitProgram(ctx.program) - } - Set.empty - } -} - -// todo: not sure where to put this -class SetModifies extends Transform("SetModifies") { - def implementation(ctx: IRContext, analyses: AnalysisManager): Set[analyses.Memoizer] = { - val specModifies = ctx.specification.subroutines.map(s => s.name -> s.modifies).toMap - ctx.program.setModifies(specModifies) - Set.empty - } -} - -// todo: not sure where to put this -class RenameBoogieKeywords extends Transform("RenameBoogieKeywords") { - def implementation(ctx: IRContext, analyses: AnalysisManager): Set[analyses.Memoizer] = { - Renamer(boogieReserved).visitProgram(ctx.program) - Set.empty - } - - override protected def postRun(ctx: IRContext): Unit = { - assert(invariant.singleCallBlockEnd(ctx.program)) - } -} - -// --- GenerateProcedureSummaries -------------------------------------------------------------------------------------- - -class GenerateProcedureSummaries(simplified: Boolean = false) extends Transform("GenerateProcedureSummaries") { - // (?) removed the 'modified' variable that we used to return from this function - // (?) 
removed the 'IRProgram' parameter - using ctx.program instead - def implementation(ctx: IRContext, analyses: AnalysisManager): Set[analyses.Memoizer] = { - val prog = ctx.program - // Need to know modifies clauses to generate summaries, but this is probably out of place (fixme) - val specModifies = ctx.specification.subroutines.map(s => s.name -> s.modifies).toMap - prog.setModifies(specModifies) - - val summaryGenerator = SummaryGenerator(prog, simplified) - for procedure <- prog.procedures if procedure != prog.mainProcedure do - procedure.requires = summaryGenerator.generateRequires(procedure) - procedure.ensures = summaryGenerator.generateEnsures(procedure) - - Set.empty - } -} - -// --- GenerateRelyGuaranteeConditions --------------------------------------------------------------------------------- - -class GenerateRgConditions(threads: List[Procedure]) extends Transform("GenerateRgConditions") { - def implementation(ctx: IRContext, analyses: AnalysisManager): Set[analyses.Memoizer] = { - /* Todo: For the moment we are printing these to stdout, but in future we'd - like to add them to the IR. */ - type StateLatticeElement = LatticeMap[Variable, analysis.Interval] - type InterferenceLatticeElement = Map[Variable, StateLatticeElement] - val stateLattice = IntervalLatticeExtension() - val stateTransfer = SignedIntervalDomain().transfer - val intDom = ConditionalWritesDomain[StateLatticeElement](stateLattice, stateTransfer) - val relyGuarantees = - RelyGuaranteeGenerator[InterferenceLatticeElement, StateLatticeElement](intDom).generate(threads) - for ((p, (rely, guar)) <- relyGuarantees) { - StaticAnalysisLogger.info("--- " + p.procName + " " + "-" * 50 + "\n") - StaticAnalysisLogger.info("Rely:") - StaticAnalysisLogger.info(intDom.toString(rely) + "\n") - StaticAnalysisLogger.info("Guarantee:") - StaticAnalysisLogger.info(intDom.toString(guar) + "\n") - } - Set.empty - } -} - -// --- DoSimplify Transform -------------------------------------------------------------------------------------------- - /* class DoSimplify(config: BASILConfig) extends TransformBatch("DoSimplify", List( From 38269914d8773593eb3ff76758ee49150c35a96c Mon Sep 17 00:00:00 2001 From: James Tobler <64625414+j-tobler@users.noreply.github.com> Date: Fri, 30 May 2025 17:34:02 +1000 Subject: [PATCH 05/30] fixed compilation errors --- src/main/scala/analysis/AnalysisManager.scala | 55 +++--- src/main/scala/analysis/StaticAnalysis.scala | 4 +- src/main/scala/ir/transforms/Simp.scala | 174 +++++++++--------- .../StripUnreachableFunctions.scala | 7 +- src/main/scala/ir/transforms/Transform.scala | 7 +- src/main/scala/util/RunUtils.scala | 106 +++++------ src/main/scala/util/Runner.scala | 6 +- 7 files changed, 181 insertions(+), 178 deletions(-) diff --git a/src/main/scala/analysis/AnalysisManager.scala b/src/main/scala/analysis/AnalysisManager.scala index c46523a690..6cb80eb4ec 100644 --- a/src/main/scala/analysis/AnalysisManager.scala +++ b/src/main/scala/analysis/AnalysisManager.scala @@ -1,6 +1,7 @@ package analysis import ir.Program +import scala.collection.mutable /** Analysis manager for caching and invalidating analysis results. * @@ -9,10 +10,10 @@ import ir.Program * the same program reference. It is then the responsibility of Transforms to clear these results when they are * invalidated by a modification to this program. 
*/ -class AnalysisManager(program: Program) { +class AnalysisManager(val program: Program) { // memoizer to wrap static analyses and cache their results - private class Memoizer[ReturnType](analysis: StaticAnalysis[ReturnType]) { + class Memoizer[ReturnType](analysis: StaticAnalysis[ReturnType]) { private var memo: Option[ReturnType] = None @@ -27,42 +28,42 @@ class AnalysisManager(program: Program) { } // keep track of all memoizers to ensure we can invalidate all of them - private val memoizers: Set[Memoizer[?]] = Nil + private val memoizers: mutable.Set[Memoizer[?]] = mutable.Set.empty // private helper function for creating and storing memoizers private def register[ReturnType](analysis: StaticAnalysis[ReturnType]): Memoizer[ReturnType] = { val mem = Memoizer[ReturnType](analysis) - memoizers ::= mem + memoizers += mem return mem } // list of memoizers - these can be directly called via this manager, e.g. val result = manager.exampleAnalysis() - val intraProcConstProp = register(IntraProcConstantPropagationAnalysis()) - val interProcConstProp = register(InterProcConstantPropagationAnalysis()) - val memoryRegionResult = register(MemoryRegionAnalysisSolverAnalysis()) - val vsaResult = register(ValueSetAnalysisSolverAnalysis()) - val interLiveVarsResults = register(/* todo */) - val paramResults = register(/* todo */) - val steensgaardSolver = register(/* todo */) // fixme: merge these into one analysis result? - val steensgaardPointsTo = register(/* todo */) - val steensgaardCallSiteSummary = register(/* todo */) - val mmmResults = register(/* todo */) - val reachingDefs = register(/* todo */) - val regionInjector = register(/* todo */) - val symbolicAddresses = register(/* todo */) - val localDSA = register(/* todo */) - val bottomUpDSA = register(/* todo */) - val topDownDSA = register(/* todo */) - val writesToResult = register(/* todo */) - val ssaResults = register(/* todo */) - val graResult = register(/* todo */) - val intraDomain = register(/* todo */) - val interDomain = register(/* todo */) +// val intraProcConstProp = register(IntraProcConstantPropagationAnalysis()) +// val interProcConstProp = register(InterProcConstantPropagationAnalysis()) +// val memoryRegionResult = register(MemoryRegionAnalysisSolverAnalysis()) +// val vsaResult = register(ValueSetAnalysisSolverAnalysis()) +// val interLiveVarsResults = register(/* todo */) +// val paramResults = register(/* todo */) +// val steensgaardSolver = register(/* todo */) // fixme: merge these into one analysis result? 
+// val steensgaardPointsTo = register(/* todo */) +// val steensgaardCallSiteSummary = register(/* todo */) +// val mmmResults = register(/* todo */) +// val reachingDefs = register(/* todo */) +// val regionInjector = register(/* todo */) +// val symbolicAddresses = register(/* todo */) +// val localDSA = register(/* todo */) +// val bottomUpDSA = register(/* todo */) +// val topDownDSA = register(/* todo */) +// val writesToResult = register(/* todo */) +// val ssaResults = register(/* todo */) +// val graResult = register(/* todo */) +// val intraDomain = register(/* todo */) +// val interDomain = register(/* todo */) // clears the cached results of all analyses except for those in the given set - def invalidateAllExcept(exceptions: Set[Memoizer]): Unit = + def invalidateAllExcept(exceptions: Set[Memoizer[?]]): Unit = memoizers.filterNot(exceptions.contains).foreach(_.invalidate()) // useful to pass to 'invalidateAllExcept' when we want to preserve all or nearly all results after a transform - def getAll(): Set[Memoizer[?]] = memoizers // safe to directly return non-mutable set + def getAll(): Set[Memoizer[?]] = memoizers.toSet } diff --git a/src/main/scala/analysis/StaticAnalysis.scala b/src/main/scala/analysis/StaticAnalysis.scala index 7b1d6a9c42..27ae15470e 100644 --- a/src/main/scala/analysis/StaticAnalysis.scala +++ b/src/main/scala/analysis/StaticAnalysis.scala @@ -1,6 +1,8 @@ package analysis import ir.Program +import util.PerformanceTimer + /** Provides a consistent interface for static analyses. * Similar to Transform, but returns a result rather than modifying the IR in-place. @@ -14,7 +16,7 @@ trait StaticAnalysis[ReturnType](val name: String) { protected def preRun(): Unit = {} - protected def postRun(): Unit = {} + protected def postRun(ret: ReturnType): Unit = {} protected def implementation: (Program, AnalysisManager) => ReturnType diff --git a/src/main/scala/ir/transforms/Simp.scala b/src/main/scala/ir/transforms/Simp.scala index 2fd7169c42..09b6ecbfcc 100644 --- a/src/main/scala/ir/transforms/Simp.scala +++ b/src/main/scala/ir/transforms/Simp.scala @@ -15,6 +15,7 @@ import scala.concurrent.duration.* import scala.util.{Failure, Success} import ExecutionContext.Implicits.global import scala.util.boundary, boundary.break +import util.IRContext /** Simplification pass, see also: docs/development/simplification-solvers.md */ @@ -791,68 +792,66 @@ def coalesceBlocksCrossBranchDependency(p: Program): Boolean = { candidate.nonEmpty } +def coalesceBlocks(proc: Procedure): Boolean = { + var didAny = false - -class CoalesceBlocks extends Transform("CoalesceBlocks") { - protected def coalesceBlocks(proc: Procedure): Boolean = { - var didAny = false - - val blocks = proc.blocks.toList - for (b <- blocks.sortBy(_.rpoOrder)) { - if ( - b.prevBlocks.size == 1 && b.prevBlocks.head.statements.nonEmpty && b.statements.nonEmpty - && b.prevBlocks.head.nextBlocks.size == 1 - && b.prevBlocks.head.statements.lastOption.forall(s => !s.isInstanceOf[Call]) - && !(b.parent.entryBlock.contains(b) || b.parent.returnBlock.contains(b)) - && b.atomicSection.isEmpty && b.prevBlocks.forall(_.atomicSection.isEmpty) - ) { - didAny = true - // append topredecessor - // we know prevBlock is only jumping to b and has no call at the end - val prevBlock = b.prevBlocks.head - val stmts = b.statements.map(b.statements.remove).toList - prevBlock.statements.appendAll(stmts) - // leave empty block b and cleanup with removeEmptyBlocks - } else if ( - b.nextBlocks.size == 1 && b.nextBlocks.head.statements.nonEmpty 
&& b.statements.nonEmpty - && b.nextBlocks.head.prevBlocks.size == 1 - && b.statements.lastOption.forall(s => !s.isInstanceOf[Call]) - && !(b.parent.entryBlock.contains(b) || b.parent.returnBlock.contains(b)) - && b.atomicSection.isEmpty && b.nextBlocks.forall(_.atomicSection.isEmpty) - ) { - didAny = true - // append to successor - // we know b is only jumping to nextBlock and does not end in a call - val nextBlock = b.nextBlocks.head - val stmts = b.statements.map(b.statements.remove).toList - nextBlock.statements.prependAll(stmts) - // leave empty block b and cleanup with removeEmptyBlocks - } else if (b.jump.isInstanceOf[Unreachable] && b.statements.isEmpty && b.prevBlocks.size == 1) { - b.prevBlocks.head.replaceJump(Unreachable()) - b.parent.removeBlocks(b) - } + val blocks = proc.blocks.toList + for (b <- blocks.sortBy(_.rpoOrder)) { + if ( + b.prevBlocks.size == 1 && b.prevBlocks.head.statements.nonEmpty && b.statements.nonEmpty + && b.prevBlocks.head.nextBlocks.size == 1 + && b.prevBlocks.head.statements.lastOption.forall(s => !s.isInstanceOf[Call]) + && !(b.parent.entryBlock.contains(b) || b.parent.returnBlock.contains(b)) + && b.atomicSection.isEmpty && b.prevBlocks.forall(_.atomicSection.isEmpty) + ) { + didAny = true + // append topredecessor + // we know prevBlock is only jumping to b and has no call at the end + val prevBlock = b.prevBlocks.head + val stmts = b.statements.map(b.statements.remove).toList + prevBlock.statements.appendAll(stmts) + // leave empty block b and cleanup with removeEmptyBlocks + } else if ( + b.nextBlocks.size == 1 && b.nextBlocks.head.statements.nonEmpty && b.statements.nonEmpty + && b.nextBlocks.head.prevBlocks.size == 1 + && b.statements.lastOption.forall(s => !s.isInstanceOf[Call]) + && !(b.parent.entryBlock.contains(b) || b.parent.returnBlock.contains(b)) + && b.atomicSection.isEmpty && b.nextBlocks.forall(_.atomicSection.isEmpty) + ) { + didAny = true + // append to successor + // we know b is only jumping to nextBlock and does not end in a call + val nextBlock = b.nextBlocks.head + val stmts = b.statements.map(b.statements.remove).toList + nextBlock.statements.prependAll(stmts) + // leave empty block b and cleanup with removeEmptyBlocks + } else if (b.jump.isInstanceOf[Unreachable] && b.statements.isEmpty && b.prevBlocks.size == 1) { + b.prevBlocks.head.replaceJump(Unreachable()) + b.parent.removeBlocks(b) } - didAny } + didAny +} - protected def coalesceBlocks(p: Program): Boolean = { - var didAny = false - for (proc <- p.procedures) { - didAny = didAny || coalesceBlocks(proc) - } - didAny +def coalesceBlocks(p: Program): Boolean = { + var didAny = false + for (proc <- p.procedures) { + didAny = didAny || coalesceBlocks(proc) } + didAny +} - def implementation(ctx: IRContext, analyses: AnalysisManager): Set[analyses.Memoizer] = { +class CoalesceBlocks(name: String = "CoalesceBlocks") extends Transform(name) { + // todo: make these protected (?) 
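+  // wraps the file-level coalesceBlocks(Program) helper defined above; the IR is mutated
+  // in place, so no cached analysis results are preserved (hence the Set.empty below)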
+ def implementation(ctx: IRContext, analyses: AnalysisManager): Set[analyses.Memoizer[?]] = { coalesceBlocks(ctx.program) Set.empty } } -class CoalesceBlocksFixpoint extends CoalesceBlocks { - override val name: String = "CoalesceBlocksFixpoint" +class CoalesceBlocksFixpoint extends CoalesceBlocks("CoalesceBlocksFixpoint") { - override def implementation(ctx: IRContext, analyses: AnalysisManager): Set[analyses.Memoizer] = { + override def implementation(ctx: IRContext, analyses: AnalysisManager): Set[analyses.Memoizer[?]] = { // useful for ReplaceReturns // (pushes single block with `Unreachable` into its predecessor) while (coalesceBlocks(ctx.program)) {} @@ -860,10 +859,6 @@ class CoalesceBlocksFixpoint extends CoalesceBlocks { } } - - - - def removeDeadInParams(p: Program): Boolean = { var modified = false assert(invariant.correctCalls(p)) @@ -1132,47 +1127,46 @@ def copyPropParamFixedPoint(p: Program, rela: Map[BigInt, BigInt]): Int = { iterations } - -class ApplyRpo extends Transform("ApplyRpo") { - def reversePostOrder(p: Procedure): Unit = { - /* Procedures may contain disconnected sets of blocks so we arbitrarily order these with respect to eachother. */ - for (b <- p.blocks) { - b.rpoOrder = -1 - } - var left = p.entryBlock.map(reversePostOrder(_)).getOrElse(0) + 1 - for (b <- p.blocks.filter(_.rpoOrder == -1)) { - left = reversePostOrder(b, true, left) + 1 - } +def reversePostOrder(p: Procedure): Unit = { + /* Procedures may contain disconnected sets of blocks so we arbitrarily order these with respect to eachother. */ + for (b <- p.blocks) { + b.rpoOrder = -1 } + var left = p.entryBlock.map(reversePostOrder(_)).getOrElse(0) + 1 + for (b <- p.blocks.filter(_.rpoOrder == -1)) { + left = reversePostOrder(b, true, left) + 1 + } +} - private def reversePostOrder(startBlock: Block, fixup: Boolean = false, begin: Int = 0): Int = { - var count = begin - val seen = mutable.HashSet[Block]() +def reversePostOrder(startBlock: Block, fixup: Boolean = false, begin: Int = 0): Int = { + var count = begin + val seen = mutable.HashSet[Block]() - def walk(b: Block): Unit = { - seen += b - for (s <- b.nextBlocks) { - if (!seen.contains(s)) { - walk(s) - } - } - if (!fixup || b.rpoOrder < count) { - b.rpoOrder = count + def walk(b: Block): Unit = { + seen += b + for (s <- b.nextBlocks) { + if (!seen.contains(s)) { + walk(s) } - count += 1 } - - walk(startBlock) - count + if (!fixup || b.rpoOrder < count) { + b.rpoOrder = count + } + count += 1 } - private def applyRPO(p: Program) = { - for (proc <- p.procedures) { - reversePostOrder(proc) - } + walk(startBlock) + count +} + +def applyRPO(p: Program) = { + for (proc <- p.procedures) { + reversePostOrder(proc) } +} - def implementation(ctx: IRContext, analyses: AnalysisManager): Set[analyses.Memoizer] = { +class ApplyRpo extends Transform("ApplyRpo") { + def implementation(ctx: IRContext, analyses: AnalysisManager): Set[analyses.Memoizer[?]] = { applyRPO(ctx.program) Set.empty } @@ -1644,7 +1638,7 @@ def findDefinitelyExits(p: Program): ProcReturnInfo = { // todo: not sure whether to make 'findDefinitelyExits' a private method of this class class ReplaceJumpsInNonReturningProcs extends Transform("ReplaceJumpsInNonReturningProcs") { - def implementation(ctx: IRContext, analyses: AnalysisManager): Set[analyses.Memoizer] = { + def implementation(ctx: IRContext, analyses: AnalysisManager): Set[analyses.Memoizer[?]] = { val nonReturning = findDefinitelyExits(ctx.program) ctx.program.mainProcedure.foreach { case d: DirectCall if 
nonReturning.nonreturning.contains(d.target) => d.parent.replaceJump(Return()) @@ -1656,7 +1650,7 @@ class ReplaceJumpsInNonReturningProcs extends Transform("ReplaceJumpsInNonReturn // todo: i have no idea what to do with this class ReplaceReturnsTransform(doSimplify: Boolean) extends Transform("ReplaceReturns") { - def implementation(ctx: IRContext, analyses: AnalysisManager): Set[analyses.Memoizer] = { + def implementation(ctx: IRContext, analyses: AnalysisManager): Set[analyses.Memoizer[?]] = { // FIXME: Main will often maintain the stack by loading R30 from the caller's stack frame // before returning, which makes the R30 assertin faile. Hence we currently skip this // assertion for main, instead we should precondition the stack layout before main @@ -1672,7 +1666,7 @@ class ReplaceReturnsTransform(doSimplify: Boolean) extends Transform("ReplaceRet } class RemoveExternalFunctionReferences extends Transform("RemoveExternalFunctionReferences") { - def implementation(ctx: IRContext, analyses: AnalysisManager): Set[analyses.Memoizer] = { + def implementation(ctx: IRContext, analyses: AnalysisManager): Set[analyses.Memoizer[?]] = { val externalNames = ctx.externalFunctions.map(_.name) val unqualifiedNames = externalNames.filter(_.contains('@')).map(_.split('@')(0)) ExternalRemover(externalNames ++ unqualifiedNames).visitProgram(ctx.program) @@ -1908,7 +1902,7 @@ class MakeProcEntriesNonLoops extends Transform("MakeProcEntriesNonLoops") { } } - def implementation(ctx: IRContext, analyses: AnalysisManager): Set[analyses.Memoizer] = { + def implementation(ctx: IRContext, analyses: AnalysisManager): Set[analyses.Memoizer[?]] = { ctx.program.procedures.foreach(makeProcEntryNonLoop) Set.empty } diff --git a/src/main/scala/ir/transforms/StripUnreachableFunctions.scala b/src/main/scala/ir/transforms/StripUnreachableFunctions.scala index 7ec9ff168b..464b2fc2eb 100644 --- a/src/main/scala/ir/transforms/StripUnreachableFunctions.scala +++ b/src/main/scala/ir/transforms/StripUnreachableFunctions.scala @@ -3,10 +3,13 @@ import ir.* import collection.mutable import util.Logger import ir.cilvisitor.* +import util.BASILConfig +import util.IRContext +import analysis.AnalysisManager // This shouldn't be run before indirect calls are resolved class StripUnreachableFunctions(config: BASILConfig) extends Transform("StripUnreachableFunctions") { - private val before: Int + private var before: Int = 0 private def stripUnreachableFunctions(p: Program, depth: Int = Int.MaxValue): Unit = { val procedureCalleeNames = p.procedures.map(f => f -> f.calls).toMap @@ -52,7 +55,7 @@ class StripUnreachableFunctions(config: BASILConfig) extends Transform("StripUnr assert(invariant.cfgCorrect(p)) } - def implementation(ctx: IRContext, analyses: AnalysisManager): Set[analyses.Memoizer] = { + def implementation(ctx: IRContext, analyses: AnalysisManager): Set[analyses.Memoizer[?]] = { stripUnreachableFunctions(ctx.program, config.loading.procedureTrimDepth) Set.empty } diff --git a/src/main/scala/ir/transforms/Transform.scala b/src/main/scala/ir/transforms/Transform.scala index ec0e4e32db..e05ce6d786 100644 --- a/src/main/scala/ir/transforms/Transform.scala +++ b/src/main/scala/ir/transforms/Transform.scala @@ -3,6 +3,7 @@ package ir.transforms import util.IRContext import ir.Program import analysis.AnalysisManager +import util.PerformanceTimer /** Provides a consistent interface for IR transforms. * @@ -29,7 +30,7 @@ trait Transform(val name: String) { * invoked, return Set.empty. 
(Note that this will negatively impact performance.) To preserve all analyses, return * analyses.getAll(). */ - protected def implementation: (ctx: IRContext, analyses: AnalysisManager) => Set[analyses.Memoizer] + protected def implementation(ctx: IRContext, analyses: AnalysisManager): Set[analyses.Memoizer[?]] // instances of transforms can be directly called to invoke this method def apply(ctx: IRContext, analyses: AnalysisManager): Unit = { @@ -53,8 +54,8 @@ trait Transform(val name: String) { * @param name The name of this transform batch. * @param transforms The sequence of other transforms that comprise this transform. */ -trait TransformBatch(name: String, transforms: List[Transform]) extends Transform(name) { - def implementation(ctx: IRContext, analyses: AnalysisManager): Set[analyses.Memoizer] = { +class TransformBatch(name: String, transforms: List[Transform]) extends Transform(name) { + def implementation(ctx: IRContext, analyses: AnalysisManager): Set[analyses.Memoizer[?]] = { // simply apply each transform in-turn transforms.foreach(_(ctx, analyses)) Set.empty diff --git a/src/main/scala/util/RunUtils.scala b/src/main/scala/util/RunUtils.scala index 7568ebb9ee..a3a7637d59 100644 --- a/src/main/scala/util/RunUtils.scala +++ b/src/main/scala/util/RunUtils.scala @@ -44,6 +44,10 @@ import util.LogLevel.INFO import scala.annotation.tailrec import scala.collection.mutable +import analysis.StaticAnalysis +import analysis.AnalysisManager +import ir.transforms.* + /** This file contains the main program execution. See RunUtils.loadAndTranslate for the high-level process. */ @@ -257,11 +261,11 @@ object IRTransform { ReplaceReturnsTransform(doSimplify), RemoveExternalFunctionReferences() )) { - override protected def preRun(): Unit = { + override protected def preRun(ctx: IRContext): Unit = { Logger.info("[!] Removing external function calls") // fixme: seems odd? 
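+      // note: the removal itself is now done by the RemoveExternalFunctionReferences transform
+      // listed in this batch; the log line above is carried over from the old doCleanup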
} - override protected def postRun(): Unit = { + override protected def postRun(ctx: IRContext): Unit = { assert(invariant.singleCallBlockEnd(ctx.program)) assert(invariant.cfgCorrect(ctx.program)) assert(invariant.blocksUniqueToEachProcedure(ctx.program)) @@ -269,6 +273,48 @@ object IRTransform { } } + // todo: not sure where to put this + class DetermineRelevantMemory(config: BASILConfig) extends Transform("DetermineRelevantMemory") { + def implementation(ctx: IRContext, analyses: AnalysisManager): Set[analyses.Memoizer[?]] = { + if (config.staticAnalysis.isEmpty || (config.staticAnalysis.get.memoryRegions == MemoryRegionsMode.Disabled)) { + ctx.program.determineRelevantMemory(ctx.globalOffsets) + } + Set.empty + } + } + + // todo: not sure where to put this + class StackSubstitution(config: BASILConfig) extends Transform("StackSubstitution") { + def implementation(ctx: IRContext, analyses: AnalysisManager): Set[analyses.Memoizer[?]] = { + if (!config.memoryTransform && + (config.staticAnalysis.isEmpty || (config.staticAnalysis.get.memoryRegions == MemoryRegionsMode.Disabled))) { + StackSubstituter().visitProgram(ctx.program) + } + Set.empty + } + } + + // todo: not sure where to put this + class SetModifies extends Transform("SetModifies") { + def implementation(ctx: IRContext, analyses: AnalysisManager): Set[analyses.Memoizer[?]] = { + val specModifies = ctx.specification.subroutines.map(s => s.name -> s.modifies).toMap + ctx.program.setModifies(specModifies) + Set.empty + } + } + + // todo: not sure where to put this + class RenameBoogieKeywords extends Transform("RenameBoogieKeywords") { + def implementation(ctx: IRContext, analyses: AnalysisManager): Set[analyses.Memoizer[?]] = { + Renamer(boogieReserved).visitProgram(ctx.program) + Set.empty + } + + override protected def postRun(ctx: IRContext): Unit = { + assert(invariant.singleCallBlockEnd(ctx.program)) + } + } + /** Cull unneccessary information that does not need to be included in the translation, and infer stack regions, and * add in modifies from the spec. 
*/ @@ -294,54 +340,12 @@ object IRTransform { } } } - - // todo: not sure where to put this - class DetermineRelevantMemory(config: BASILConfig) extends Transform("DetermineRelevantMemory") { - def implementation(ctx: IRContext, analyses: AnalysisManager): Set[analyses.Memoizer] = { - if (config.staticAnalysis.isEmpty || (config.staticAnalysis.get.memoryRegions == MemoryRegionsMode.Disabled)) { - ctx.program.determineRelevantMemory(ctx.globalOffsets) - } - Set.empty - } - } - - // todo: not sure where to put this - class StackSubstitution(config: BASILConfig) extends Transform("StackSubstitution") { - def implementation(ctx: IRContext, analyses: AnalysisManager): Set[analyses.Memoizer] = { - if (!config.memoryTransform && - (config.staticAnalysis.isEmpty || (config.staticAnalysis.get.memoryRegions == MemoryRegionsMode.Disabled))) { - StackSubstituter().visitProgram(ctx.program) - } - Set.empty - } - } - - // todo: not sure where to put this - class SetModifies extends Transform("SetModifies") { - def implementation(ctx: IRContext, analyses: AnalysisManager): Set[analyses.Memoizer] = { - val specModifies = ctx.specification.subroutines.map(s => s.name -> s.modifies).toMap - ctx.program.setModifies(specModifies) - Set.empty - } - } - - // todo: not sure where to put this - class RenameBoogieKeywords extends Transform("RenameBoogieKeywords") { - def implementation(ctx: IRContext, analyses: AnalysisManager): Set[analyses.Memoizer] = { - Renamer(boogieReserved).visitProgram(ctx.program) - Set.empty - } - - override protected def postRun(ctx: IRContext): Unit = { - assert(invariant.singleCallBlockEnd(ctx.program)) - } - } } class GenerateProcedureSummaries(simplified: Boolean = false) extends Transform("GenerateProcedureSummaries") { // (?) removed the 'modified' variable that we used to return from this function // (?) removed the 'IRProgram' parameter - using ctx.program instead - def implementation(ctx: IRContext, analyses: AnalysisManager): Set[analyses.Memoizer] = { + def implementation(ctx: IRContext, analyses: AnalysisManager): Set[analyses.Memoizer[?]] = { val prog = ctx.program // Need to know modifies clauses to generate summaries, but this is probably out of place (fixme) val specModifies = ctx.specification.subroutines.map(s => s.name -> s.modifies).toMap @@ -361,7 +365,7 @@ object IRTransform { } class GenerateRgConditions(threads: List[Procedure]) extends Transform("GenerateRgConditions") { - def implementation(ctx: IRContext, analyses: AnalysisManager): Set[analyses.Memoizer] = { + def implementation(ctx: IRContext, analyses: AnalysisManager): Set[analyses.Memoizer[?]] = { /* Todo: For the moment we are printing these to stdout, but in future we'd like to add them to the IR. */ type StateLatticeElement = LatticeMap[Variable, analysis.Interval] @@ -878,11 +882,9 @@ object RunUtils { transforms.inlinePLTLaunchpad(ctx.program) if (q.loading.trimEarly) { - val before = ctx.program.procedures.size - transforms.stripUnreachableFunctions(ctx.program, q.loading.procedureTrimDepth) - Logger.info( - s"[!] 
Removed ${before - ctx.program.procedures.size} functions (${ctx.program.procedures.size} remaining)" - ) + StripUnreachableFunctions(q)(ctx, AnalysisManager(ctx.program)) + // todo: since refactoring, there is some extra code that is run here + // namely, the portion of StripUnreachableFunctions.postRun after the logger } ctx.program.procedures.foreach(transforms.RemoveUnreachableBlocks.apply) @@ -981,7 +983,7 @@ object RunUtils { IRTransform.PrepareForTranslation(q)(ctx, AnalysisManager(ctx.program)) if (conf.generateRelyGuarantees) { - IRTransform.GenerateRelyGuaranteeConditions(ctx.program.procedures.toList.filter(p => p.returnBlock != None))(ctx, AnalysisManager(ctx.program)) + IRTransform.GenerateRgConditions(ctx.program.procedures.toList.filter(p => p.returnBlock != None))(ctx, AnalysisManager(ctx.program)) } q.loading.dumpIL.foreach(s => { diff --git a/src/main/scala/util/Runner.scala b/src/main/scala/util/Runner.scala index 653a44a781..5a1f6de9a6 100644 --- a/src/main/scala/util/Runner.scala +++ b/src/main/scala/util/Runner.scala @@ -37,16 +37,16 @@ class DoSimplify(config: BASILConfig) extends TransformBatch("DoSimplify", List( LogIrAfterSlices(), LogSimplificationValidation() )) - +*/ class IdentifyLoops extends Transform("IdentifyLoops") { - def implementation(ctx: IRContext, analyses: AnalysisManager): Set[analyses.Memoizer] = { + def implementation(ctx: IRContext, analyses: AnalysisManager): Set[analyses.Memoizer[?]] = { val foundLoops = LoopDetector.identify_loops(ctx.program) val newLoops = foundLoops.reducibleTransformIR() newLoops.updateIrWithLoops() Set.empty } } - +/* class NormaliseBlockNames extends Transform("NormaliseBlockNames") { def implementation(ctx: IRContext, analyses: AnalysisManager): Set[analyses.Memoizer] = { ctx.program.procedures.foreach(_.normaliseBlockNames()) From 03fd78e668186e96c482c93379a13ed25f5614cb Mon Sep 17 00:00:00 2001 From: James Tobler <64625414+j-tobler@users.noreply.github.com> Date: Wed, 4 Jun 2025 10:47:59 +1000 Subject: [PATCH 06/30] made transforms toggle-able; simplified parameters for some transforms --- .../StripUnreachableFunctions.scala | 6 +++--- src/main/scala/ir/transforms/Transform.scala | 5 ++++- src/main/scala/util/RunUtils.scala | 21 +++++++++---------- 3 files changed, 17 insertions(+), 15 deletions(-) diff --git a/src/main/scala/ir/transforms/StripUnreachableFunctions.scala b/src/main/scala/ir/transforms/StripUnreachableFunctions.scala index 464b2fc2eb..e36dca9eec 100644 --- a/src/main/scala/ir/transforms/StripUnreachableFunctions.scala +++ b/src/main/scala/ir/transforms/StripUnreachableFunctions.scala @@ -8,10 +8,10 @@ import util.IRContext import analysis.AnalysisManager // This shouldn't be run before indirect calls are resolved -class StripUnreachableFunctions(config: BASILConfig) extends Transform("StripUnreachableFunctions") { +class StripUnreachableFunctions(depth: Int) extends Transform("StripUnreachableFunctions") { private var before: Int = 0 - private def stripUnreachableFunctions(p: Program, depth: Int = Int.MaxValue): Unit = { + private def stripUnreachableFunctions(p: Program): Unit = { val procedureCalleeNames = p.procedures.map(f => f -> f.calls).toMap val toVisit: mutable.LinkedHashSet[(Int, Procedure)] = mutable.LinkedHashSet((0, p.mainProcedure)) @@ -56,7 +56,7 @@ class StripUnreachableFunctions(config: BASILConfig) extends Transform("StripUnr } def implementation(ctx: IRContext, analyses: AnalysisManager): Set[analyses.Memoizer[?]] = { - stripUnreachableFunctions(ctx.program, 
config.loading.procedureTrimDepth) + stripUnreachableFunctions(ctx.program) Set.empty } diff --git a/src/main/scala/ir/transforms/Transform.scala b/src/main/scala/ir/transforms/Transform.scala index e05ce6d786..92fa3ebcf9 100644 --- a/src/main/scala/ir/transforms/Transform.scala +++ b/src/main/scala/ir/transforms/Transform.scala @@ -10,7 +10,9 @@ import util.PerformanceTimer * @param name The name of this transform. */ trait Transform(val name: String) { - + + // when false, this transform does nothing + val toggle: Boolean = true // the performance of each transform is implicitly tested val t = PerformanceTimer(name) @@ -34,6 +36,7 @@ trait Transform(val name: String) { // instances of transforms can be directly called to invoke this method def apply(ctx: IRContext, analyses: AnalysisManager): Unit = { + if (!toggle) return if (analyses.program ne ctx.program) { // the program we are transforming should be the same one for which the analysis results were produced throw new RuntimeException(s"Transform $name was passed an AnalysisManager of an IR Program with a different " + diff --git a/src/main/scala/util/RunUtils.scala b/src/main/scala/util/RunUtils.scala index a3a7637d59..67b5d86a4a 100644 --- a/src/main/scala/util/RunUtils.scala +++ b/src/main/scala/util/RunUtils.scala @@ -274,9 +274,9 @@ object IRTransform { } // todo: not sure where to put this - class DetermineRelevantMemory(config: BASILConfig) extends Transform("DetermineRelevantMemory") { + class DetermineRelevantMemory(maybeStaticAnalysisConfig: Option[StaticAnalysisConfig]) extends Transform("DetermineRelevantMemory") { def implementation(ctx: IRContext, analyses: AnalysisManager): Set[analyses.Memoizer[?]] = { - if (config.staticAnalysis.isEmpty || (config.staticAnalysis.get.memoryRegions == MemoryRegionsMode.Disabled)) { + if (maybeStaticAnalysisConfig.isEmpty || (maybeStaticAnalysisConfig.get.memoryRegions == MemoryRegionsMode.Disabled)) { ctx.program.determineRelevantMemory(ctx.globalOffsets) } Set.empty @@ -284,12 +284,9 @@ object IRTransform { } // todo: not sure where to put this - class StackSubstitution(config: BASILConfig) extends Transform("StackSubstitution") { + class StackSubstitution(override val toggle: Boolean = true) extends Transform("StackSubstitution") { def implementation(ctx: IRContext, analyses: AnalysisManager): Set[analyses.Memoizer[?]] = { - if (!config.memoryTransform && - (config.staticAnalysis.isEmpty || (config.staticAnalysis.get.memoryRegions == MemoryRegionsMode.Disabled))) { - StackSubstituter().visitProgram(ctx.program) - } + StackSubstituter().visitProgram(ctx.program) Set.empty } } @@ -319,9 +316,11 @@ object IRTransform { * add in modifies from the spec. 
*/ class PrepareForTranslation(config: BASILConfig) extends TransformBatch("PrepareForTranslation", List( - DetermineRelevantMemory(config), - StripUnreachableFunctions(config), - StackSubstitution(config), + DetermineRelevantMemory(config.staticAnalysis), + StripUnreachableFunctions(config.loading.procedureTrimDepth), + StackSubstitution(toggle = + !config.memoryTransform && + (config.staticAnalysis.isEmpty || (config.staticAnalysis.get.memoryRegions == MemoryRegionsMode.Disabled))), SetModifies(), RenameBoogieKeywords() )) { @@ -882,7 +881,7 @@ object RunUtils { transforms.inlinePLTLaunchpad(ctx.program) if (q.loading.trimEarly) { - StripUnreachableFunctions(q)(ctx, AnalysisManager(ctx.program)) + StripUnreachableFunctions(q.loading.procedureTrimDepth)(ctx, AnalysisManager(ctx.program)) // todo: since refactoring, there is some extra code that is run here // namely, the portion of StripUnreachableFunctions.postRun after the logger } From 5baa3979fb92922130127f195e29d0d38ce1d479 Mon Sep 17 00:00:00 2001 From: James Tobler <64625414+j-tobler@users.noreply.github.com> Date: Wed, 4 Jun 2025 11:51:14 +1000 Subject: [PATCH 07/30] fixed compilation errors in tests --- src/test/scala/ConditionLiftingTests.scala | 4 +++- src/test/scala/DifferentialAnalysisTest.scala | 5 +++-- src/test/scala/InterpretTestConstProp.scala | 4 ++-- src/test/scala/ir/InterpreterTests.scala | 6 ++++-- 4 files changed, 12 insertions(+), 7 deletions(-) diff --git a/src/test/scala/ConditionLiftingTests.scala b/src/test/scala/ConditionLiftingTests.scala index 915904f96e..a9ca36a926 100644 --- a/src/test/scala/ConditionLiftingTests.scala +++ b/src/test/scala/ConditionLiftingTests.scala @@ -3,6 +3,8 @@ import org.scalatest.matchers.should.Matchers import scala.util.{Try, Failure, Success} import java.io.OutputStream import translating.PrettyPrinter.* +import util.IRTransform +import analysis.AnalysisManager import ir.* import ir.dsl.* @@ -823,7 +825,7 @@ class ConditionLiftingRegressionTest extends AnyFunSuite with test_util.CaptureO test("conds inline test") { var ctx = util.IRLoading.load(testProgram) - util.IRTransform.doCleanup(ctx, true) + IRTransform.DoCleanup(true)(ctx, AnalysisManager(ctx.program)) ir.transforms.clearParams(ctx.program) ctx = ir.transforms.liftProcedureCallAbstraction(ctx) util.RunUtils.doSimplify(ctx, None) diff --git a/src/test/scala/DifferentialAnalysisTest.scala b/src/test/scala/DifferentialAnalysisTest.scala index d00b5f4dbe..a3f6de1c3a 100644 --- a/src/test/scala/DifferentialAnalysisTest.scala +++ b/src/test/scala/DifferentialAnalysisTest.scala @@ -6,6 +6,7 @@ import org.scalatest.funsuite.* import util.{IRLoading, ILLoadingConfig, IRContext, RunUtils, StaticAnalysisConfig, Logger, LogLevel, IRTransform} import ir.eval.ExecEffect import test_util.* +import analysis.AnalysisManager abstract class DifferentialTest extends AnyFunSuite, CaptureOutput, TestCustomisation { @@ -89,10 +90,10 @@ abstract class DifferentialTest extends AnyFunSuite, CaptureOutput, TestCustomis ) var ictx = IRLoading.load(loading) - ictx = IRTransform.doCleanup(ictx) + IRTransform.DoCleanup()(ictx, AnalysisManager(ictx.program)) var comparectx = IRLoading.load(loading) - comparectx = IRTransform.doCleanup(comparectx) + IRTransform.DoCleanup()(comparectx, AnalysisManager(comparectx.program)) ir.transforms.clearParams(ictx.program) diff --git a/src/test/scala/InterpretTestConstProp.scala b/src/test/scala/InterpretTestConstProp.scala index 255a7911c0..918456e7f3 100644 --- a/src/test/scala/InterpretTestConstProp.scala +++ 
b/src/test/scala/InterpretTestConstProp.scala @@ -54,8 +54,8 @@ class InterpretTestConstProp val path = s"$testPath/$testName/$compiler/$testName" val loading = ILLoadingConfig(inputFile = s"$path.adt", relfFile = s"$path.relf", dumpIL = None) - var ictx = IRLoading.load(loading) - ictx = IRTransform.doCleanup(ictx) + val ictx = IRLoading.load(loading) + IRTransform.DoCleanup()(ictx, AnalysisManager(ictx.program)) ir.transforms.clearParams(ictx.program) val analyses = RunUtils.staticAnalysis(StaticAnalysisConfig(None, None, None), ictx) diff --git a/src/test/scala/ir/InterpreterTests.scala b/src/test/scala/ir/InterpreterTests.scala index 42dfaae7f0..7bf3526e8b 100644 --- a/src/test/scala/ir/InterpreterTests.scala +++ b/src/test/scala/ir/InterpreterTests.scala @@ -16,6 +16,7 @@ import util.{ILLoadingConfig, IRContext, IRLoading, IRTransform} import test_util.{BASILTest, CaptureOutput} import ir.dsl.given import ir.dsl.IfThenBlocks +import analysis.AnalysisManager def load(s: InterpreterState, global: SpecGlobal): Option[BitVecLiteral] = { val f = NormalInterpreter @@ -48,8 +49,9 @@ class InterpreterTests extends AnyFunSuite with CaptureOutput with BeforeAndAfte dumpIL = None ) - val p = IRLoading.load(loading) - val ctx = IRTransform.doCleanup(p) + val ctx = IRLoading.load(loading) + IRTransform.DoCleanup()(ctx, AnalysisManager(ctx.program)) + ir.transforms.clearParams(ctx.program) // val bapProgram = loadBAP(loading.inputFile) // val (symbols, externalFunctions, globals, _, mainAddress) = loadReadELF(loading.relfFile, loading) From b3e2bf406e8c08ebded745aea73ef40075a384d8 Mon Sep 17 00:00:00 2001 From: James Tobler <64625414+j-tobler@users.noreply.github.com> Date: Fri, 6 Jun 2025 11:35:36 +1000 Subject: [PATCH 08/30] scalafmt --- src/main/scala/analysis/StaticAnalysis.scala | 9 +-- src/main/scala/ir/transforms/Transform.scala | 6 +- src/main/scala/util/RunUtils.scala | 61 +++++++++++++------- src/main/scala/util/Runner.scala | 4 +- 4 files changed, 50 insertions(+), 30 deletions(-) diff --git a/src/main/scala/analysis/StaticAnalysis.scala b/src/main/scala/analysis/StaticAnalysis.scala index 27ae15470e..b2a8be74ec 100644 --- a/src/main/scala/analysis/StaticAnalysis.scala +++ b/src/main/scala/analysis/StaticAnalysis.scala @@ -3,7 +3,6 @@ package analysis import ir.Program import util.PerformanceTimer - /** Provides a consistent interface for static analyses. * Similar to Transform, but returns a result rather than modifying the IR in-place. * @@ -11,7 +10,7 @@ import util.PerformanceTimer * @param name The name of this analysis. */ trait StaticAnalysis[ReturnType](val name: String) { - + val t = PerformanceTimer(name) protected def preRun(): Unit = {} @@ -22,8 +21,10 @@ trait StaticAnalysis[ReturnType](val name: String) { def apply(prog: Program, analyses: AnalysisManager): ReturnType = { if (analyses.program ne prog) { - throw new RuntimeException(s"Analysis $name was passed an AnalysisManager of an IR Program with a different " + - s"reference value than the program being transformed.") + throw new RuntimeException( + s"Analysis $name was passed an AnalysisManager of an IR Program with a different " + + s"reference value than the program being transformed." 
+ ) } preRun() t.checkPoint("start") diff --git a/src/main/scala/ir/transforms/Transform.scala b/src/main/scala/ir/transforms/Transform.scala index 92fa3ebcf9..fca88be274 100644 --- a/src/main/scala/ir/transforms/Transform.scala +++ b/src/main/scala/ir/transforms/Transform.scala @@ -39,8 +39,10 @@ trait Transform(val name: String) { if (!toggle) return if (analyses.program ne ctx.program) { // the program we are transforming should be the same one for which the analysis results were produced - throw new RuntimeException(s"Transform $name was passed an AnalysisManager of an IR Program with a different " + - s"reference value than the program being transformed.") + throw new RuntimeException( + s"Transform $name was passed an AnalysisManager of an IR Program with a different " + + s"reference value than the program being transformed." + ) } preRun(ctx) t.checkPoint("start") diff --git a/src/main/scala/util/RunUtils.scala b/src/main/scala/util/RunUtils.scala index fdc92c4852..eaaa302b0d 100644 --- a/src/main/scala/util/RunUtils.scala +++ b/src/main/scala/util/RunUtils.scala @@ -204,14 +204,18 @@ object IRTransform { /** Initial cleanup before analysis. */ - class DoCleanup(doSimplify: Boolean = false) extends TransformBatch("DoCleanup", List( - MakeProcEntriesNonLoops(), - CoalesceBlocksFixpoint(), - ApplyRpo(), - ReplaceJumpsInNonReturningProcs(), - ReplaceReturnsTransform(doSimplify), - RemoveExternalFunctionReferences() - )) { + class DoCleanup(doSimplify: Boolean = false) + extends TransformBatch( + "DoCleanup", + List( + MakeProcEntriesNonLoops(), + CoalesceBlocksFixpoint(), + ApplyRpo(), + ReplaceJumpsInNonReturningProcs(), + ReplaceReturnsTransform(doSimplify), + RemoveExternalFunctionReferences() + ) + ) { override protected def preRun(ctx: IRContext): Unit = { Logger.info("[!] Removing external function calls") // fixme: seems odd? } @@ -225,9 +229,12 @@ object IRTransform { } // todo: not sure where to put this - class DetermineRelevantMemory(maybeStaticAnalysisConfig: Option[StaticAnalysisConfig]) extends Transform("DetermineRelevantMemory") { + class DetermineRelevantMemory(maybeStaticAnalysisConfig: Option[StaticAnalysisConfig]) + extends Transform("DetermineRelevantMemory") { def implementation(ctx: IRContext, analyses: AnalysisManager): Set[analyses.Memoizer[?]] = { - if (maybeStaticAnalysisConfig.isEmpty || (maybeStaticAnalysisConfig.get.memoryRegions == MemoryRegionsMode.Disabled)) { + if ( + maybeStaticAnalysisConfig.isEmpty || (maybeStaticAnalysisConfig.get.memoryRegions == MemoryRegionsMode.Disabled) + ) { ctx.program.determineRelevantMemory(ctx.globalOffsets) } Set.empty @@ -266,15 +273,20 @@ object IRTransform { /** Cull unneccessary information that does not need to be included in the translation, and infer stack regions, and * add in modifies from the spec. 
*/ - class PrepareForTranslation(config: BASILConfig) extends TransformBatch("PrepareForTranslation", List( - DetermineRelevantMemory(config.staticAnalysis), - StripUnreachableFunctions(config.loading.procedureTrimDepth), - StackSubstitution(toggle = - !config.memoryTransform && - (config.staticAnalysis.isEmpty || (config.staticAnalysis.get.memoryRegions == MemoryRegionsMode.Disabled))), - SetModifies(), - RenameBoogieKeywords() - )) { + class PrepareForTranslation(config: BASILConfig) + extends TransformBatch( + "PrepareForTranslation", + List( + DetermineRelevantMemory(config.staticAnalysis), + StripUnreachableFunctions(config.loading.procedureTrimDepth), + StackSubstitution(toggle = + !config.memoryTransform && + (config.staticAnalysis.isEmpty || (config.staticAnalysis.get.memoryRegions == MemoryRegionsMode.Disabled)) + ), + SetModifies(), + RenameBoogieKeywords() + ) + ) { override protected def postRun(ctx: IRContext): Unit = { // check all blocks with an atomic section exist within the same procedure val visited = mutable.Set[Block]() @@ -893,11 +905,13 @@ object RunUtils { } if (conf.summariseProcedures) { - IRTransform.GenerateProcedureSummaries(q.loading.parameterForm || conf.simplify)(ctx, AnalysisManager(ctx.program)) + IRTransform + .GenerateProcedureSummaries(q.loading.parameterForm || conf.simplify)(ctx, AnalysisManager(ctx.program)) } if (conf.summariseProcedures) { - IRTransform.GenerateProcedureSummaries(q.loading.parameterForm || conf.simplify)(ctx, AnalysisManager(ctx.program)) + IRTransform + .GenerateProcedureSummaries(q.loading.parameterForm || conf.simplify)(ctx, AnalysisManager(ctx.program)) } if (q.runInterpret) { @@ -933,7 +947,10 @@ object RunUtils { IRTransform.PrepareForTranslation(q)(ctx, AnalysisManager(ctx.program)) if (conf.generateRelyGuarantees) { - IRTransform.GenerateRgConditions(ctx.program.procedures.toList.filter(p => p.returnBlock != None))(ctx, AnalysisManager(ctx.program)) + IRTransform.GenerateRgConditions(ctx.program.procedures.toList.filter(p => p.returnBlock != None))( + ctx, + AnalysisManager(ctx.program) + ) } q.loading.dumpIL.foreach(s => { diff --git a/src/main/scala/util/Runner.scala b/src/main/scala/util/Runner.scala index 5a1f6de9a6..22138ce35c 100644 --- a/src/main/scala/util/Runner.scala +++ b/src/main/scala/util/Runner.scala @@ -37,7 +37,7 @@ class DoSimplify(config: BASILConfig) extends TransformBatch("DoSimplify", List( LogIrAfterSlices(), LogSimplificationValidation() )) -*/ + */ class IdentifyLoops extends Transform("IdentifyLoops") { def implementation(ctx: IRContext, analyses: AnalysisManager): Set[analyses.Memoizer[?]] = { val foundLoops = LoopDetector.identify_loops(ctx.program) @@ -416,4 +416,4 @@ class ssaResults extends StaticAnalysis[ def implementation(prog: Program, analyses: AnalysisManager) = _(prog).analyze() } -*/ + */ From f88644c168fa82205556a3829bc12cc28a4425c6 Mon Sep 17 00:00:00 2001 From: James Tobler <64625414+j-tobler@users.noreply.github.com> Date: Fri, 20 Jun 2025 11:07:32 +1000 Subject: [PATCH 09/30] Created case classes SingleTransform and TransformBatch. Moved transforms out of RunUtils.IRTransforms. Created some new transforms. 
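For illustration only (a sketch, not code added by this patch): in the new style a transform is declared as a value whose implementation takes the IRContext and an AnalysisManager, mutates ctx.program in place, and returns an Invalidation describing which memoized analysis results to clobber; it is then invoked against a manager wrapping the same Program instance. The name removeNopsSketch below is hypothetical and only stands in for a real rewrite.

    // sketch: a single transform in the new style (removeNopsSketch is hypothetical)
    val removeNopsSketch = ir.transforms.SingleTransform("RemoveNopsSketch", (ctx, man) => {
      // ... rewrite ctx.program in place here ...
      man.ClobberAll // conservatively invalidate every cached analysis result
    })

    // batches compose transforms; a notice and post-run checks are optional
    val cleanupSketch = ir.transforms.TransformBatch("CleanupSketch", List(removeNopsSketch))

    // invocation: the AnalysisManager must wrap the same program being transformed, e.g.
    //   cleanupSketch(ctx, analysis.AnalysisManager(ctx.program))

Transforms that know their effect is narrower can return PreserveAll, PreserveSome, or ClobberSome instead of ClobberAll.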
--- src/main/scala/analysis/AnalysisManager.scala | 48 +- src/main/scala/analysis/StaticAnalysis.scala | 18 +- .../SummaryGenerator.scala | 23 +- .../RelyGuaranteeGeneration.scala | 25 ++ .../ir/transforms/InlinePLTLaunchpad.scala | 7 +- .../ir/transforms/PrepareForTranslation.scala | 61 +++ src/main/scala/ir/transforms/Simp.scala | 158 +++---- .../StripUnreachableFunctions.scala | 120 +++-- src/main/scala/ir/transforms/Transform.scala | 142 ++++-- src/main/scala/util/RunUtils.scala | 201 +-------- src/main/scala/util/Runner.scala | 419 ------------------ src/test/scala/ConditionLiftingTests.scala | 4 +- src/test/scala/DifferentialAnalysisTest.scala | 6 +- src/test/scala/InterpretTestConstProp.scala | 3 +- src/test/scala/TestKnownBitsInterpreter.scala | 3 +- src/test/scala/ir/InterpreterTests.scala | 4 +- 16 files changed, 398 insertions(+), 844 deletions(-) create mode 100644 src/main/scala/ir/transforms/PrepareForTranslation.scala delete mode 100644 src/main/scala/util/Runner.scala diff --git a/src/main/scala/analysis/AnalysisManager.scala b/src/main/scala/analysis/AnalysisManager.scala index 6cb80eb4ec..25bf5de318 100644 --- a/src/main/scala/analysis/AnalysisManager.scala +++ b/src/main/scala/analysis/AnalysisManager.scala @@ -22,11 +22,21 @@ class AnalysisManager(val program: Program) { // allows this memoizer to be called like a function def apply(): ReturnType = { // pass this analysis manager and its associated program to the static analysis - if memo.isEmpty then memo = Some(analysis(program, AnalysisManager.this)) + if memo.isEmpty then memo = Some(analysis(AnalysisManager.this)) memo.get } } + enum Invalidation { + case PreserveAll + case ClobberAll + case PreserveSome(toPreserve: Set[Memoizer[?]]) + case ClobberSome(toClobber: Set[Memoizer[?]]) + } + + // todo: not sure if this is the right approach - maybe we should implement convenience methods instead? + export Invalidation.* + // keep track of all memoizers to ensure we can invalidate all of them private val memoizers: mutable.Set[Memoizer[?]] = mutable.Set.empty @@ -37,33 +47,13 @@ class AnalysisManager(val program: Program) { return mem } - // list of memoizers - these can be directly called via this manager, e.g. val result = manager.exampleAnalysis() -// val intraProcConstProp = register(IntraProcConstantPropagationAnalysis()) -// val interProcConstProp = register(InterProcConstantPropagationAnalysis()) -// val memoryRegionResult = register(MemoryRegionAnalysisSolverAnalysis()) -// val vsaResult = register(ValueSetAnalysisSolverAnalysis()) -// val interLiveVarsResults = register(/* todo */) -// val paramResults = register(/* todo */) -// val steensgaardSolver = register(/* todo */) // fixme: merge these into one analysis result? 
-// val steensgaardPointsTo = register(/* todo */) -// val steensgaardCallSiteSummary = register(/* todo */) -// val mmmResults = register(/* todo */) -// val reachingDefs = register(/* todo */) -// val regionInjector = register(/* todo */) -// val symbolicAddresses = register(/* todo */) -// val localDSA = register(/* todo */) -// val bottomUpDSA = register(/* todo */) -// val topDownDSA = register(/* todo */) -// val writesToResult = register(/* todo */) -// val ssaResults = register(/* todo */) -// val graResult = register(/* todo */) -// val intraDomain = register(/* todo */) -// val interDomain = register(/* todo */) - - // clears the cached results of all analyses except for those in the given set - def invalidateAllExcept(exceptions: Set[Memoizer[?]]): Unit = - memoizers.filterNot(exceptions.contains).foreach(_.invalidate()) + def invalidate(invalidation: Invalidation): Unit = invalidation match { + case PreserveAll => () + case ClobberAll => memoizers.foreach(_.invalidate()) + case PreserveSome(toPreserve) => (memoizers.toSet -- toPreserve).foreach(_.invalidate()) + case ClobberSome(toClobber) => toClobber.foreach(_.invalidate()) + } - // useful to pass to 'invalidateAllExcept' when we want to preserve all or nearly all results after a transform - def getAll(): Set[Memoizer[?]] = memoizers.toSet + // todo: list of memoizers which can be directly called via this manager + // val exampleAnalysis = register(ExampleAnalysis()) } diff --git a/src/main/scala/analysis/StaticAnalysis.scala b/src/main/scala/analysis/StaticAnalysis.scala index b2a8be74ec..f8dc570a42 100644 --- a/src/main/scala/analysis/StaticAnalysis.scala +++ b/src/main/scala/analysis/StaticAnalysis.scala @@ -13,24 +13,12 @@ trait StaticAnalysis[ReturnType](val name: String) { val t = PerformanceTimer(name) - protected def preRun(): Unit = {} + protected def implementation(man: AnalysisManager): ReturnType - protected def postRun(ret: ReturnType): Unit = {} - - protected def implementation: (Program, AnalysisManager) => ReturnType - - def apply(prog: Program, analyses: AnalysisManager): ReturnType = { - if (analyses.program ne prog) { - throw new RuntimeException( - s"Analysis $name was passed an AnalysisManager of an IR Program with a different " + - s"reference value than the program being transformed." 
- ) - } - preRun() + def apply(man: AnalysisManager): ReturnType = { t.checkPoint("start") - val ret = implementation(prog, analyses) + val ret = implementation(man) t.checkPoint("end") - postRun(ret) ret } } diff --git a/src/main/scala/analysis/procedure_summaries/SummaryGenerator.scala b/src/main/scala/analysis/procedure_summaries/SummaryGenerator.scala index b01c11adf3..321be166bc 100644 --- a/src/main/scala/analysis/procedure_summaries/SummaryGenerator.scala +++ b/src/main/scala/analysis/procedure_summaries/SummaryGenerator.scala @@ -12,7 +12,9 @@ import ir.transforms.{ ProcAbstractDomain, SCCCallgraphWorklistSolver, reversePostOrder, - worklistSolver + worklistSolver, + Transform, + SingleTransform } case class Condition(pred: Predicate, label: Option[String] = None) @@ -219,3 +221,22 @@ class SummaryGenerator(program: Program, parameterForm: Boolean = false) { .filter(_ != TrueBLiteral) } } + +def getGenerateProcedureSummariesTransform(simplified: Boolean): Transform = + SingleTransform( + "GenerateProcedureSummaries", + (ctx, man) => { + val prog = ctx.program + // Need to know modifies clauses to generate summaries, but this is probably out of place (fixme) + val specModifies = ctx.specification.subroutines.map(s => s.name -> s.modifies).toMap + prog.setModifies(specModifies) + + val summaryGenerator = SummaryGenerator(prog, simplified) + for procedure <- prog.procedures if procedure != prog.mainProcedure do + procedure.requires = summaryGenerator.generateRequires(procedure) + procedure.ensures = summaryGenerator.generateEnsures(procedure) + + man.ClobberAll + }, + notice = "Generating Procedure Summaries" + ) diff --git a/src/main/scala/analysis/rely_guarantee_generation/RelyGuaranteeGeneration.scala b/src/main/scala/analysis/rely_guarantee_generation/RelyGuaranteeGeneration.scala index 9e3bdfd23e..14a934195e 100644 --- a/src/main/scala/analysis/rely_guarantee_generation/RelyGuaranteeGeneration.scala +++ b/src/main/scala/analysis/rely_guarantee_generation/RelyGuaranteeGeneration.scala @@ -4,6 +4,7 @@ import ir.* import ir.transforms.* import analysis.* import scala.collection.mutable.Queue +import util.StaticAnalysisLogger /** To generate guarantee conditions, we need to: * 1. Generate the set of reachable states using the state domain. 
@@ -149,3 +150,27 @@ class GuarGenSummaryGenerator[T, S](dom: InterferenceProductDomain[T, S]) // we want to expand the previous postcondition by joining this one dom.pureJoin(prevSummary, resAfter(p.returnBlock.get)) } + +def getGenerateRgConditionsTransform(threads: List[Procedure]): Transform = + SingleTransform( + "GenerateRgConditions", + (ctx, man) => { + type StateLatticeElement = LatticeMap[Variable, analysis.Interval] + type InterferenceLatticeElement = Map[Variable, StateLatticeElement] + val stateLattice = IntervalLatticeExtension() + val stateTransfer = SignedIntervalDomain().transfer + val intDom = ConditionalWritesDomain[StateLatticeElement](stateLattice, stateTransfer) + val relyGuarantees = + RelyGuaranteeGenerator[InterferenceLatticeElement, StateLatticeElement](intDom).generate(threads) + // fixme: these should not be printed to stdout + for ((p, (rely, guar)) <- relyGuarantees) { + StaticAnalysisLogger.info("--- " + p.procName + " " + "-" * 50 + "\n") + StaticAnalysisLogger.info("Rely:") + StaticAnalysisLogger.info(intDom.toString(rely) + "\n") + StaticAnalysisLogger.info("Guarantee:") + StaticAnalysisLogger.info(intDom.toString(guar) + "\n") + } + man.ClobberAll + }, + notice = "Generating Rely-Guarantee Conditions" + ) diff --git a/src/main/scala/ir/transforms/InlinePLTLaunchpad.scala b/src/main/scala/ir/transforms/InlinePLTLaunchpad.scala index b50c3e6136..f64786348e 100644 --- a/src/main/scala/ir/transforms/InlinePLTLaunchpad.scala +++ b/src/main/scala/ir/transforms/InlinePLTLaunchpad.scala @@ -1,7 +1,8 @@ package ir.transforms import ir.Program -def inlinePLTLaunchpad(prog: Program) = { +val inlinePLTLaunchpad = SingleTransform("InlinePLTLaunchpad", (ctx, man) => { + val prog = ctx.program for (p <- prog.procedures) { val candidate = @@ -19,5 +20,5 @@ def inlinePLTLaunchpad(prog: Program) = { } applyRPO(prog) - -} + man.ClobberAll +}) diff --git a/src/main/scala/ir/transforms/PrepareForTranslation.scala b/src/main/scala/ir/transforms/PrepareForTranslation.scala new file mode 100644 index 0000000000..797cbef377 --- /dev/null +++ b/src/main/scala/ir/transforms/PrepareForTranslation.scala @@ -0,0 +1,61 @@ +package ir.transforms + +import scala.collection.mutable +import ir.{StackSubstituter, Renamer, Block} +import ir.invariant +import util.BASILConfig +import analysis.AnalysisManager + +// run iff arg.isEmpty || (arg.get.memoryRegions == MemoryRegionsMode.Disabled) +val determineRelevantMemory = SingleTransform("DetermineRelevantMemory", (ctx, man) => { + ctx.program.determineRelevantMemory(ctx.globalOffsets) + man.ClobberAll +}) + +// run iff arg +val stackSubstitution = SingleTransform("StackSubstitution", (ctx, man) => { + StackSubstituter().visitProgram(ctx.program) + man.ClobberAll +}) + +val setModifies = SingleTransform("SetModifies", (ctx, man) => { + val specModifies = ctx.specification.subroutines.map(s => s.name -> s.modifies).toMap + ctx.program.setModifies(specModifies) + man.ClobberAll +}) + +def getRenameBoogieKeywordsTransform(boogieReserved: Set[String]): Transform = + SingleTransform("RenameBoogieKeywords", (ctx, man) => { + Renamer(boogieReserved).visitProgram(ctx.program) + man.ClobberAll + }) + +/** Cull unneccessary information that does not need to be included in the translation, and infer stack regions, and + * add in modifies from the spec. 
+ */ +def getPrepareForTranslationTransform(config: BASILConfig, boogieReserved: Set[String]): Transform = TransformBatch( + "PrepareForTranslation", + List( + determineRelevantMemory, // run iff config.staticAnalysis.isEmpty || (config.staticAnalysis.get.memoryRegions == MemoryRegionsMode.Disabled) + getStripUnreachableFunctionsTransform(config.loading.procedureTrimDepth), + stackSubstitution, // run iff !config.memoryTransform && (config.staticAnalysis.isEmpty || (config.staticAnalysis.get.memoryRegions == MemoryRegionsMode.Disabled)) + setModifies, + getRenameBoogieKeywordsTransform(boogieReserved: Set[String]) + ), + postRunChecks = ctx => { + assert(invariant.singleCallBlockEnd(ctx.program)) + // check all blocks with an atomic section exist within the same procedure + val visited = mutable.Set[Block]() + for (p <- ctx.program.procedures) { + for (b <- p.blocks) { + if (!visited.contains(b)) { + if (b.atomicSection.isDefined) { + b.atomicSection.get.getBlocks.foreach { a => assert(a.parent == p) } + visited.addAll(b.atomicSection.get.getBlocks) + } + visited.addOne(b) + } + } + } + } +) diff --git a/src/main/scala/ir/transforms/Simp.scala b/src/main/scala/ir/transforms/Simp.scala index 09b6ecbfcc..c467225f0b 100644 --- a/src/main/scala/ir/transforms/Simp.scala +++ b/src/main/scala/ir/transforms/Simp.scala @@ -841,23 +841,15 @@ def coalesceBlocks(p: Program): Boolean = { didAny } -class CoalesceBlocks(name: String = "CoalesceBlocks") extends Transform(name) { - // todo: make these protected (?) - def implementation(ctx: IRContext, analyses: AnalysisManager): Set[analyses.Memoizer[?]] = { - coalesceBlocks(ctx.program) - Set.empty - } -} +val coalesceBlocksOnce = SingleTransform("CoalesceBlocksOnce", (ctx, man) => { + coalesceBlocks(ctx.program) + man.ClobberAll +}) -class CoalesceBlocksFixpoint extends CoalesceBlocks("CoalesceBlocksFixpoint") { - - override def implementation(ctx: IRContext, analyses: AnalysisManager): Set[analyses.Memoizer[?]] = { - // useful for ReplaceReturns - // (pushes single block with `Unreachable` into its predecessor) - while (coalesceBlocks(ctx.program)) {} - Set.empty - } -} +val coalesceBlocksFixpoint = SingleTransform("CoalesceBlocksFixpoint", (ctx, man) => { + while (coalesceBlocks(ctx.program)) {} + man.ClobberAll +}) def removeDeadInParams(p: Program): Boolean = { var modified = false @@ -1165,12 +1157,10 @@ def applyRPO(p: Program) = { } } -class ApplyRpo extends Transform("ApplyRpo") { - def implementation(ctx: IRContext, analyses: AnalysisManager): Set[analyses.Memoizer[?]] = { - applyRPO(ctx.program) - Set.empty - } -} +val applyRpoTransform = SingleTransform("ApplyRPO", (ctx, man) => { + applyRPO(man.program) + man.ClobberAll +}) object getProcFrame { class GetProcFrame(frames: Procedure => Set[Memory]) extends CILVisitor { @@ -1636,49 +1626,66 @@ def findDefinitelyExits(p: Program): ProcReturnInfo = { ) } -// todo: not sure whether to make 'findDefinitelyExits' a private method of this class -class ReplaceJumpsInNonReturningProcs extends Transform("ReplaceJumpsInNonReturningProcs") { - def implementation(ctx: IRContext, analyses: AnalysisManager): Set[analyses.Memoizer[?]] = { - val nonReturning = findDefinitelyExits(ctx.program) - ctx.program.mainProcedure.foreach { - case d: DirectCall if nonReturning.nonreturning.contains(d.target) => d.parent.replaceJump(Return()) - case _ => - } - Set.empty +val replaceJumpsInNonReturningProcs = SingleTransform("ReplaceJumpsInNonReturningProcs", (ctx, man) => { + val nonReturning = 
findDefinitelyExits(ctx.program) + ctx.program.mainProcedure.foreach { + case d: DirectCall if nonReturning.nonreturning.contains(d.target) => d.parent.replaceJump(Return()) + case _ => } -} + man.ClobberAll +}) + +def getReplaceReturnsTransform(doSimplify: Boolean): Transform = SingleTransform("ReplaceReturns", (ctx, man) => { + cilvisitor.visit_prog( + ReplaceReturns(proc => doSimplify && ctx.program.mainProcedure != proc), + ctx.program + ) + transforms.addReturnBlocks(ctx.program, insertR30InvariantAssertion = _ => doSimplify) + cilvisitor.visit_prog(transforms.ConvertSingleReturn(), ctx.program) + man.ClobberAll +}) + +val removeExternalFunctionReferences = SingleTransform("RemoveExternalFunctionReferences", (ctx, man) => { + val externalNames = ctx.externalFunctions.map(_.name) + val unqualifiedNames = externalNames.filter(_.contains('@')).map(_.split('@')(0)) + ExternalRemover(externalNames ++ unqualifiedNames).visitProgram(ctx.program) + for (p <- ctx.program.procedures) { + p.isExternal = Some( + ctx.externalFunctions.exists(e => e.name == p.procName || p.address.contains(e.offset)) || p.isExternal + .getOrElse(false) + ) + } + man.ClobberAll +}) -// todo: i have no idea what to do with this -class ReplaceReturnsTransform(doSimplify: Boolean) extends Transform("ReplaceReturns") { - def implementation(ctx: IRContext, analyses: AnalysisManager): Set[analyses.Memoizer[?]] = { +def getDoCleanupTransform(doSimplify: Boolean): Transform = TransformBatch( + "DoCleanup", + List( + makeProcEntriesNonLoops, + // useful for ReplaceReturns + // (pushes single block with `Unreachable` into its predecessor) + coalesceBlocksFixpoint, + applyRpoTransform, + replaceJumpsInNonReturningProcs, // FIXME: Main will often maintain the stack by loading R30 from the caller's stack frame // before returning, which makes the R30 assertin faile. Hence we currently skip this // assertion for main, instead we should precondition the stack layout before main // but the interaction between spec and memory regions is nontrivial currently - cilvisitor.visit_prog( - transforms.ReplaceReturns(proc => doSimplify && ctx.program.mainProcedure != proc), - ctx.program - ) - transforms.addReturnBlocks(ctx.program, insertR30InvariantAssertion = _ => doSimplify) - cilvisitor.visit_prog(transforms.ConvertSingleReturn(), ctx.program) - Set.empty - } -} - -class RemoveExternalFunctionReferences extends Transform("RemoveExternalFunctionReferences") { - def implementation(ctx: IRContext, analyses: AnalysisManager): Set[analyses.Memoizer[?]] = { - val externalNames = ctx.externalFunctions.map(_.name) - val unqualifiedNames = externalNames.filter(_.contains('@')).map(_.split('@')(0)) - ExternalRemover(externalNames ++ unqualifiedNames).visitProgram(ctx.program) - for (p <- ctx.program.procedures) { - p.isExternal = Some( - ctx.externalFunctions.exists(e => e.name == p.procName || p.address.contains(e.offset)) || p.isExternal - .getOrElse(false) - ) - } - Set.empty - } -} + getReplaceReturnsTransform(doSimplify), + removeExternalFunctionReferences + ), + notice = "Removing external function calls", // fixme: seems odd? 
+ postRunChecks = ctx => { + assert(invariant.singleCallBlockEnd(ctx.program)) + assert(invariant.cfgCorrect(ctx.program)) + assert(invariant.blocksUniqueToEachProcedure(ctx.program)) + assert(invariant.procEntryNoIncoming(ctx.program)) + } +) + +// these are called a lot so it's useful to create them here rather than generating many copies on the fly +val doCleanupWithSimplify = getDoCleanupTransform(true) +val doCleanupWithoutSimplify = getDoCleanupTransform(false) class Simplify(val res: Boolean => Variable => Option[Expr], val initialBlock: Block = null) extends CILVisitor { @@ -1889,21 +1896,20 @@ def removeTriviallyDeadBranches(p: Program, removeAllUnreachableBlocks: Boolean dead.nonEmpty } -class MakeProcEntriesNonLoops extends Transform("MakeProcEntriesNonLoops") { - // ensure procedure entry has no incoming jumps, if it does replace with new - // block jumping to the old procedure entry - private def makeProcEntryNonLoop(p: Procedure) = { - if (p.entryBlock.exists(_.prevBlocks.nonEmpty)) { - val nb = Block(p.name + "_entry") - p.addBlock(nb) - val eb = p.entryBlock.get - nb.replaceJump(GoTo(eb)) - p.entryBlock = nb - } - } - - def implementation(ctx: IRContext, analyses: AnalysisManager): Set[analyses.Memoizer[?]] = { - ctx.program.procedures.foreach(makeProcEntryNonLoop) - Set.empty +val makeProcEntriesNonLoops = SingleTransform( + "MakeProcEntriesNonLoops", + (ctx, man) => { + ctx.program.procedures.foreach(p => { + // ensure procedure entry has no incoming jumps, if it does replace with new + // block jumping to the old procedure entry + if (p.entryBlock.exists(_.prevBlocks.nonEmpty)) { + val nb = Block(p.name + "_entry") + p.addBlock(nb) + val eb = p.entryBlock.get + nb.replaceJump(GoTo(eb)) + p.entryBlock = nb + } + }) + man.ClobberAll } -} +) diff --git a/src/main/scala/ir/transforms/StripUnreachableFunctions.scala b/src/main/scala/ir/transforms/StripUnreachableFunctions.scala index e36dca9eec..968f426cb3 100644 --- a/src/main/scala/ir/transforms/StripUnreachableFunctions.scala +++ b/src/main/scala/ir/transforms/StripUnreachableFunctions.scala @@ -8,78 +8,76 @@ import util.IRContext import analysis.AnalysisManager // This shouldn't be run before indirect calls are resolved -class StripUnreachableFunctions(depth: Int) extends Transform("StripUnreachableFunctions") { - private var before: Int = 0 +def stripUnreachableFunctions(p: Program, depth: Int): Unit = { + val procedureCalleeNames = p.procedures.map(f => f -> f.calls).toMap - private def stripUnreachableFunctions(p: Program): Unit = { - val procedureCalleeNames = p.procedures.map(f => f -> f.calls).toMap + val toVisit: mutable.LinkedHashSet[(Int, Procedure)] = mutable.LinkedHashSet((0, p.mainProcedure)) + var reachableFound = true + val reachableNames = mutable.HashMap[Procedure, Int]() + while (toVisit.nonEmpty) { + val next = toVisit.head + toVisit.remove(next) - val toVisit: mutable.LinkedHashSet[(Int, Procedure)] = mutable.LinkedHashSet((0, p.mainProcedure)) - var reachableFound = true - val reachableNames = mutable.HashMap[Procedure, Int]() - while (toVisit.nonEmpty) { - val next = toVisit.head - toVisit.remove(next) + if (next._1 <= depth) { - if (next._1 <= depth) { - - def addName(depth: Int, name: Procedure): Unit = { - val oldDepth = reachableNames.getOrElse(name, Integer.MAX_VALUE) - reachableNames.put(next._2, if depth < oldDepth then depth else oldDepth) - } - addName(next._1, next._2) - - val callees = procedureCalleeNames(next._2) - - toVisit.addAll(callees.diff(reachableNames.keySet).map(c => (next._1 + 
1, c))) - callees.foreach(c => addName(next._1 + 1, c)) + def addName(depth: Int, name: Procedure): Unit = { + val oldDepth = reachableNames.getOrElse(name, Integer.MAX_VALUE) + reachableNames.put(next._2, if depth < oldDepth then depth else oldDepth) } - } - assert(invariant.cfgCorrect(p)) - val removed = p.procedures.filterNot(f => reachableNames.keySet.contains(f)).toSet - // p.procedures = p.procedures.filter(f => reachableNames.keySet.contains(f.name)) - for (proc <- removed) { - p.removeProcedure(proc) - } - - for (elem <- p.procedures.filter(c => c.calls.exists(s => removed.contains(s)))) { - // last layer is analysed only as specifications so we remove the body for anything that calls - // a function we have removed + addName(next._1, next._2) - elem.clearBlocks() - assert(elem.entryBlock.isEmpty) - assert(elem.returnBlock.isEmpty) + val callees = procedureCalleeNames(next._2) + toVisit.addAll(callees.diff(reachableNames.keySet).map(c => (next._1 + 1, c))) + callees.foreach(c => addName(next._1 + 1, c)) } - assert(invariant.blocksUniqueToEachProcedure(p)) - assert(invariant.cfgCorrect(p)) } - - def implementation(ctx: IRContext, analyses: AnalysisManager): Set[analyses.Memoizer[?]] = { - stripUnreachableFunctions(ctx.program) - Set.empty + assert(invariant.cfgCorrect(p)) + val removed = p.procedures.filterNot(f => reachableNames.keySet.contains(f)).toSet + // p.procedures = p.procedures.filter(f => reachableNames.keySet.contains(f.name)) + for (proc <- removed) { + p.removeProcedure(proc) } - override protected def preRun(ctx: IRContext): Unit = { - Logger.info("[!] Stripping unreachable") - before = ctx.program.procedures.size - } + for (elem <- p.procedures.filter(c => c.calls.exists(s => removed.contains(s)))) { + // last layer is analysed only as specifications so we remove the body for anything that calls + // a function we have removed - override protected def postRun(ctx: IRContext): Unit = { - Logger.info( - s"[!] Removed ${before - ctx.program.procedures.size} functions (${ctx.program.procedures.size} remaining)" - ) - val dupProcNames = ctx.program.procedures.groupBy(_.name).filter((_, p) => p.size > 1).toList.flatMap(_(1)) - assert(dupProcNames.isEmpty) + elem.clearBlocks() + assert(elem.entryBlock.isEmpty) + assert(elem.returnBlock.isEmpty) - ctx.program.procedures.foreach(p => - p.blocks.foreach(b => { - b.jump match { - case GoTo(targs, _) if targs.isEmpty => - Logger.warn(s"block ${b.label} in subroutine ${p.name} has no outgoing edges") - case _ => () - } - }) - ) } + assert(invariant.blocksUniqueToEachProcedure(p)) + assert(invariant.cfgCorrect(p)) } + +def getStripUnreachableFunctionsTransform(depth: Int): Transform = + SingleTransform( + "StripUnreachableFunctions", + (ctx, man) => { + val before = ctx.program.procedures.size + stripUnreachableFunctions(ctx.program, depth) + Logger.info( + s"[!] Removed ${before - ctx.program.procedures.size} functions (${ctx.program.procedures.size} remaining)" + ) + + /* Fixme: Since refactoring RunUtils, the following code runs when this transform is invoked by the + loadAndTranslate function, whereas it used to only run when invoked by the prepareForTranslation function. I don't + know if this is problematic. 
*/ + val dupProcNames = ctx.program.procedures.groupBy(_.name).filter((_, p) => p.size > 1).toList.flatMap(_(1)) + assert(dupProcNames.isEmpty) + + ctx.program.procedures.foreach(p => + p.blocks.foreach(b => { + b.jump match { + case GoTo(targs, _) if targs.isEmpty => + Logger.warn(s"block ${b.label} in subroutine ${p.name} has no outgoing edges") + case _ => () + } + }) + ) + man.ClobberAll + }, + notice = "Stripping Unreachable" + ) diff --git a/src/main/scala/ir/transforms/Transform.scala b/src/main/scala/ir/transforms/Transform.scala index fca88be274..93695826a0 100644 --- a/src/main/scala/ir/transforms/Transform.scala +++ b/src/main/scala/ir/transforms/Transform.scala @@ -1,68 +1,116 @@ package ir.transforms -import util.IRContext -import ir.Program +import scala.collection.mutable +import util.{IRContext, Logger, DebugDumpIRLogger, PerformanceTimer} +import ir.{Program, dotBlockGraph} import analysis.AnalysisManager -import util.PerformanceTimer +import translating.PrettyPrinter.pp_prog +import java.io.File -/** Provides a consistent interface for IR transforms. - * - * @param name The name of this transform. + +// TransformConfig accepts Log instances which specify what kind of logs to dump for particular transforms +trait Log { + def dump(ctx: IRContext, transformName: String): Unit +} + +// dumps a blockgraph log +case class BlockgraphLog(filenamePrefix: String) extends Log { + def dump(ctx: IRContext, transformName: String): Unit = + DebugDumpIRLogger.writeToFile( + File(s"${filenamePrefix}_blockgraph-${transformName}.dot"), + dotBlockGraph(ctx.program.mainProcedure)) +} + +// dumps an IR log +case class IrLog(filenamePrefix: String) extends Log { + def dump(ctx: IRContext, transformName: String): Unit = + DebugDumpIRLogger.writeToFile(File(s"${filenamePrefix}_il-${transformName}.il"), pp_prog(ctx.program)) +} + +/** Allows the behaviour of transforms to be configured at runtime, upon invocation. + * + * @param disabled Optionally specify a set of transforms to disable. + * @param dumpLogs Optionally specify which logs to dump for which transforms, if any. */ -trait Transform(val name: String) { +case class TransformConfig( + disabled: Set[Transform] = Set.empty, + dumpLogs: Map[Transform, Set[Log]] = Map.empty +) - // when false, this transform does nothing - val toggle: Boolean = true - // the performance of each transform is implicitly tested - val t = PerformanceTimer(name) +// default value for transforms +val emptyConfig = TransformConfig() - // code to run before the transform implementation, such as logging information - protected def preRun(ctx: IRContext): Unit = {} +/** Currently, we have two kinds of transforms: SingleTransform, and TransformBatch. This trait provides a common + * interface for them to share. + * + * Transforms can be directly called to invoke the shared 'apply' method, which applies the transform in the context + * of some runtime configuration. They are designed to be read-only to avoid inter-dependencies between their users; if + * some configuration is required, it must be provided upon invocation. 
+ */ +trait Transform { + // human-readable name of the transform; it is used in the names of generated log files + val name: String + // optional message to log upon invocation of this transform + val notice: String + // the performance of each transform is implicitly measured + val timer: PerformanceTimer = PerformanceTimer(name) - // code to run after the transform implementation, such as logging information or assertions - protected def postRun(ctx: IRContext): Unit = {} + // modifies the given IR context in-place, using the analysis results provided by this analysis manager + protected def transform(ctx: IRContext, man: AnalysisManager, config: TransformConfig): man.Invalidation - /** Override this method to implement the logic for your transform. - * - * @param ctx The IR to be modified in-place. - * @param analyses Use this to access the results of static analyses. Any results not yet generated will be produced - * automatically and then cached in the manager for later retrieval. - * - * @return The set of analyses that are *preserved* after the transform. To clear all analyses after the transform is - * invoked, return Set.empty. (Note that this will negatively impact performance.) To preserve all analyses, return - * analyses.getAll(). - */ - protected def implementation(ctx: IRContext, analyses: AnalysisManager): Set[analyses.Memoizer[?]] + // optional code to run *after* performance has been measured but *before* any logs are dumped, e.g. post-run checks + protected def postRun(ctx: IRContext): Unit - // instances of transforms can be directly called to invoke this method - def apply(ctx: IRContext, analyses: AnalysisManager): Unit = { - if (!toggle) return - if (analyses.program ne ctx.program) { + // executes the transform with any modifications or book-keeping specified by the given config + def apply(ctx: IRContext, man: AnalysisManager, config: TransformConfig = emptyConfig): Unit = { + if (config.disabled.contains(this)) return + if (notice != "") then Logger.info(s"[!] ${notice}") + if (man.program ne ctx.program) { // the program we are transforming should be the same one for which the analysis results were produced throw new RuntimeException( - s"Transform $name was passed an AnalysisManager of an IR Program with a different " + + s"Transform '$name' was passed an AnalysisManager of an IR Program with a different " + s"reference value than the program being transformed." ) } - preRun(ctx) - t.checkPoint("start") - val toPreserve = implementation(ctx, analyses) - t.checkPoint("end") - postRun(ctx) - analyses.invalidateAllExcept(toPreserve) + val maybeLogs = config.dumpLogs.get(this) + maybeLogs.foreach(_.foreach(_.dump(ctx, s"before-$name"))) + timer.checkPoint("start") + val invalidation = transform(ctx, man, config) // run the actual transform and get the analysis results to clobber + timer.checkPoint("end") + postRun(ctx) // runs after performance checks, and before logging + maybeLogs.foreach(_.foreach(_.dump(ctx, s"after-$name"))) + man.invalidate(invalidation) // clobber the specified analysis results } } -/** A transform can be a sequence of other transforms. We prefer using this over constructing transforms in the - * implementations of other transforms. - * - * @param name The name of this transform batch. - * @param transforms The sequence of other transforms that comprise this transform. +/** A standard transform. Accepts an implementation function which modifies the given IR context in-place. 
*/ -class TransformBatch(name: String, transforms: List[Transform]) extends Transform(name) { - def implementation(ctx: IRContext, analyses: AnalysisManager): Set[analyses.Memoizer[?]] = { - // simply apply each transform in-turn - transforms.foreach(_(ctx, analyses)) - Set.empty +case class SingleTransform( + name: String, + implementation: (ctx: IRContext, man: AnalysisManager) => man.Invalidation, + notice: String = "" +) extends Transform { + // simply calls the given implementation function + def transform(ctx: IRContext, man: AnalysisManager, config: TransformConfig): man.Invalidation = + implementation(ctx, man) + + // standard transforms don't need anything here; post-run checks should be handled by the implementation + def postRun(ctx: IRContext): Unit = () +} + +/** A transform batch is a sequence of other transforms, followed by some optional post-run checks on the IR context. + */ +case class TransformBatch( + name: String, + transforms: List[Transform], + notice: String = "", + postRunChecks: IRContext => Unit = _ => () +) extends Transform { + // runs each sub-transform in-turn (invalidation is handled by the sub-transforms) + def transform(ctx: IRContext, man: AnalysisManager, config: TransformConfig): man.Invalidation = { + transforms.foreach(_(ctx, man, config)) + man.PreserveAll } + + def postRun(ctx: IRContext): Unit = postRunChecks(ctx) } diff --git a/src/main/scala/util/RunUtils.scala b/src/main/scala/util/RunUtils.scala index eaaa302b0d..f49b3c31c8 100644 --- a/src/main/scala/util/RunUtils.scala +++ b/src/main/scala/util/RunUtils.scala @@ -195,164 +195,6 @@ object IRLoading { } } -/** Methods related to transforming the IR `Program` in-place. - * - * These operate over the IRContext, and possibly use static analysis results. - */ -object IRTransform { - val boogieReserved: Set[String] = Set("free") - - /** Initial cleanup before analysis. - */ - class DoCleanup(doSimplify: Boolean = false) - extends TransformBatch( - "DoCleanup", - List( - MakeProcEntriesNonLoops(), - CoalesceBlocksFixpoint(), - ApplyRpo(), - ReplaceJumpsInNonReturningProcs(), - ReplaceReturnsTransform(doSimplify), - RemoveExternalFunctionReferences() - ) - ) { - override protected def preRun(ctx: IRContext): Unit = { - Logger.info("[!] Removing external function calls") // fixme: seems odd? 
- } - - override protected def postRun(ctx: IRContext): Unit = { - assert(invariant.singleCallBlockEnd(ctx.program)) - assert(invariant.cfgCorrect(ctx.program)) - assert(invariant.blocksUniqueToEachProcedure(ctx.program)) - assert(invariant.procEntryNoIncoming(ctx.program)) - } - } - - // todo: not sure where to put this - class DetermineRelevantMemory(maybeStaticAnalysisConfig: Option[StaticAnalysisConfig]) - extends Transform("DetermineRelevantMemory") { - def implementation(ctx: IRContext, analyses: AnalysisManager): Set[analyses.Memoizer[?]] = { - if ( - maybeStaticAnalysisConfig.isEmpty || (maybeStaticAnalysisConfig.get.memoryRegions == MemoryRegionsMode.Disabled) - ) { - ctx.program.determineRelevantMemory(ctx.globalOffsets) - } - Set.empty - } - } - - // todo: not sure where to put this - class StackSubstitution(override val toggle: Boolean = true) extends Transform("StackSubstitution") { - def implementation(ctx: IRContext, analyses: AnalysisManager): Set[analyses.Memoizer[?]] = { - StackSubstituter().visitProgram(ctx.program) - Set.empty - } - } - - // todo: not sure where to put this - class SetModifies extends Transform("SetModifies") { - def implementation(ctx: IRContext, analyses: AnalysisManager): Set[analyses.Memoizer[?]] = { - val specModifies = ctx.specification.subroutines.map(s => s.name -> s.modifies).toMap - ctx.program.setModifies(specModifies) - Set.empty - } - } - - // todo: not sure where to put this - class RenameBoogieKeywords extends Transform("RenameBoogieKeywords") { - def implementation(ctx: IRContext, analyses: AnalysisManager): Set[analyses.Memoizer[?]] = { - Renamer(boogieReserved).visitProgram(ctx.program) - Set.empty - } - - override protected def postRun(ctx: IRContext): Unit = { - assert(invariant.singleCallBlockEnd(ctx.program)) - } - } - - /** Cull unneccessary information that does not need to be included in the translation, and infer stack regions, and - * add in modifies from the spec. - */ - class PrepareForTranslation(config: BASILConfig) - extends TransformBatch( - "PrepareForTranslation", - List( - DetermineRelevantMemory(config.staticAnalysis), - StripUnreachableFunctions(config.loading.procedureTrimDepth), - StackSubstitution(toggle = - !config.memoryTransform && - (config.staticAnalysis.isEmpty || (config.staticAnalysis.get.memoryRegions == MemoryRegionsMode.Disabled)) - ), - SetModifies(), - RenameBoogieKeywords() - ) - ) { - override protected def postRun(ctx: IRContext): Unit = { - // check all blocks with an atomic section exist within the same procedure - val visited = mutable.Set[Block]() - for (p <- ctx.program.procedures) { - for (b <- p.blocks) { - if (!visited.contains(b)) { - if (b.atomicSection.isDefined) { - b.atomicSection.get.getBlocks.foreach { a => assert(a.parent == p) } - visited.addAll(b.atomicSection.get.getBlocks) - } - visited.addOne(b) - } - } - } - } - } - - class GenerateProcedureSummaries(simplified: Boolean = false) extends Transform("GenerateProcedureSummaries") { - // (?) removed the 'modified' variable that we used to return from this function - // (?) 
removed the 'IRProgram' parameter - using ctx.program instead - def implementation(ctx: IRContext, analyses: AnalysisManager): Set[analyses.Memoizer[?]] = { - val prog = ctx.program - // Need to know modifies clauses to generate summaries, but this is probably out of place (fixme) - val specModifies = ctx.specification.subroutines.map(s => s.name -> s.modifies).toMap - prog.setModifies(specModifies) - - val summaryGenerator = SummaryGenerator(prog, simplified) - for procedure <- prog.procedures if procedure != prog.mainProcedure do - procedure.requires = summaryGenerator.generateRequires(procedure) - procedure.ensures = summaryGenerator.generateEnsures(procedure) - - Set.empty - } - - override protected def preRun(ctx: IRContext): Unit = { - StaticAnalysisLogger.info("[!] Generating Procedure Summaries") - } - } - - class GenerateRgConditions(threads: List[Procedure]) extends Transform("GenerateRgConditions") { - def implementation(ctx: IRContext, analyses: AnalysisManager): Set[analyses.Memoizer[?]] = { - /* Todo: For the moment we are printing these to stdout, but in future we'd - like to add them to the IR. */ - type StateLatticeElement = LatticeMap[Variable, analysis.Interval] - type InterferenceLatticeElement = Map[Variable, StateLatticeElement] - val stateLattice = IntervalLatticeExtension() - val stateTransfer = SignedIntervalDomain().transfer - val intDom = ConditionalWritesDomain[StateLatticeElement](stateLattice, stateTransfer) - val relyGuarantees = - RelyGuaranteeGenerator[InterferenceLatticeElement, StateLatticeElement](intDom).generate(threads) - for ((p, (rely, guar)) <- relyGuarantees) { - StaticAnalysisLogger.info("--- " + p.procName + " " + "-" * 50 + "\n") - StaticAnalysisLogger.info("Rely:") - StaticAnalysisLogger.info(intDom.toString(rely) + "\n") - StaticAnalysisLogger.info("Guarantee:") - StaticAnalysisLogger.info(intDom.toString(guar) + "\n") - } - Set.empty - } - - override protected def preRun(ctx: IRContext): Unit = { - StaticAnalysisLogger.info("[!] Generating Rely-Guarantee Conditions") - } - } -} - /** Methods relating to program static analysis. */ object StaticAnalysis { @@ -718,7 +560,8 @@ object RunUtils { transforms.OnePassDSA().applyTransform(program) - transforms.inlinePLTLaunchpad(ctx.program) + // fixme: this used to be a plain function but now we have to supply an analysis manager! 
+ transforms.inlinePLTLaunchpad(ctx, AnalysisManager(ctx.program)) transforms.removeEmptyBlocks(program) @@ -839,15 +682,22 @@ object RunUtils { assert(invariant.cfgCorrect(ctx.program)) assert(invariant.blocksUniqueToEachProcedure(ctx.program)) - IRTransform.DoCleanup(conf.simplify)(ctx, AnalysisManager(ctx.program)) + val analysisManager = AnalysisManager(ctx.program) - transforms.inlinePLTLaunchpad(ctx.program) + // these transforms depend on basil config parameters and thus need to be constructed here + val prepareForTranslation = getPrepareForTranslationTransform(q, Set("free")) + val genProcSummaries = getGenerateProcedureSummariesTransform(q.loading.parameterForm || conf.simplify) + val genRgConditions = getGenerateRgConditionsTransform(ctx.program.procedures.toList.filter(_.returnBlock != None)) + val stripUnreachableFunctions = getStripUnreachableFunctionsTransform(q.loading.procedureTrimDepth) - if (q.loading.trimEarly) { - StripUnreachableFunctions(q.loading.procedureTrimDepth)(ctx, AnalysisManager(ctx.program)) - // todo: since refactoring, there is some extra code that is run here - // namely, the portion of StripUnreachableFunctions.postRun after the logger - } + if conf.simplify then doCleanupWithSimplify(ctx, analysisManager) + else doCleanupWithoutSimplify(ctx, analysisManager) + + transforms.inlinePLTLaunchpad(ctx, analysisManager) + + if (q.loading.trimEarly) then stripUnreachableFunctions(ctx, analysisManager) + // todo: since refactoring, there is some extra code that is run here + // see StripUnreachableFunctions.getStripUnreachableFunctionsTransform ctx.program.procedures.foreach(transforms.RemoveUnreachableBlocks.apply) Logger.info(s"[!] Removed unreachable blocks") @@ -904,15 +754,7 @@ object RunUtils { memTransferTimer.checkPoint("Performed Memory Transform") } - if (conf.summariseProcedures) { - IRTransform - .GenerateProcedureSummaries(q.loading.parameterForm || conf.simplify)(ctx, AnalysisManager(ctx.program)) - } - - if (conf.summariseProcedures) { - IRTransform - .GenerateProcedureSummaries(q.loading.parameterForm || conf.simplify)(ctx, AnalysisManager(ctx.program)) - } + if q.summariseProcedures then genProcSummaries(ctx, analysisManager) if (q.runInterpret) { Logger.info("Start interpret") @@ -944,14 +786,9 @@ object RunUtils { } } - IRTransform.PrepareForTranslation(q)(ctx, AnalysisManager(ctx.program)) + prepareForTranslation(ctx, analysisManager) - if (conf.generateRelyGuarantees) { - IRTransform.GenerateRgConditions(ctx.program.procedures.toList.filter(p => p.returnBlock != None))( - ctx, - AnalysisManager(ctx.program) - ) - } + if (conf.generateRelyGuarantees) then genRgConditions(ctx, analysisManager) q.loading.dumpIL.foreach(s => { writeToFile(pp_prog(ctx.program), s"$s-output.il") diff --git a/src/main/scala/util/Runner.scala b/src/main/scala/util/Runner.scala deleted file mode 100644 index 22138ce35c..0000000000 --- a/src/main/scala/util/Runner.scala +++ /dev/null @@ -1,419 +0,0 @@ -package util - -import ir.* -import ir.transforms.* -import analysis.* - -/* - -class DoSimplify(config: BASILConfig) extends TransformBatch("DoSimplify", List( - // (?) removed logger command: Logger.info("[!] 
Running Simplify") - IdentifyLoops(), - NormaliseBlockNames(), - SortProcedures(), - LiftSvComp(), - DumpIlBeforeSimp(config), - ApplyRpo(), - RemoveEmptyBlocks(), - CoalesceBlocks(), - RemoveEmptyBlocks(), - LogBlockgraphBeforeDsa(config), - LogIrBeforeDsa(config), - OnePassDsa(), - InlinePLTLaunchpad(), - RemoveEmptyBlocks(), - LogBlockgraphAfterDsa(config), - LogIrAfterDsa(config), - LogDsaResultsAndDoChecks(), - LogIrBeforeCopyProp(), - LogBlockGraphBeforeCopyProp(), - CopyProp(), - FixUpGuards(), - RemoveDuplicateGuards(), - LogBlockGraphAfterSimp(), - LiftLinuxAssertFail(), - LogIrAfterCopyProp(), - DsaCheck(), - LogIrAfterSlices(), - LogSimplificationValidation() -)) - */ -class IdentifyLoops extends Transform("IdentifyLoops") { - def implementation(ctx: IRContext, analyses: AnalysisManager): Set[analyses.Memoizer[?]] = { - val foundLoops = LoopDetector.identify_loops(ctx.program) - val newLoops = foundLoops.reducibleTransformIR() - newLoops.updateIrWithLoops() - Set.empty - } -} -/* -class NormaliseBlockNames extends Transform("NormaliseBlockNames") { - def implementation(ctx: IRContext, analyses: AnalysisManager): Set[analyses.Memoizer] = { - ctx.program.procedures.foreach(_.normaliseBlockNames()) - Set.empty - } -} - -class SortProcedures extends Transform("SortProcedures") { - def implementation(ctx: IRContext, analyses: AnalysisManager): Set[analyses.Memoizer] = { - ctx.program.sortProceduresRPO() - Set.empty - } -} - -class LiftSvComp extends Transform("LiftSvComp") { - def implementation(ctx: IRContext, analyses: AnalysisManager): Set[analyses.Memoizer] = { - transforms.liftSVComp(ctx.program) - Set.empty - } -} - -// ??? -class DumpIlBeforeSimp(config: BASILConfig) extends Transform("DumpIlBeforeSimp") { - def implementation(ctx: IRContext, analyses: AnalysisManager): Set[analyses.Memoizer] = { - config.foreach { - _.dumpILToPath.foreach { s => - DebugDumpIRLogger.writeToFile(File(s"${s}_il-before-simp.il"), pp_prog(ctx.program)) - } - } - Set.empty - } -} - -class RemoveEmptyBlocks extends Transform("RemoveEmptyBlocks") { - def implementation(ctx: IRContext, analyses: AnalysisManager): Set[analyses.Memoizer] = { - transforms.removeEmptyBlocks(ctx.program) - Set.empty - } -} - -class LogBlockgraphBeforeDsa(config: BASILConfig) extends Transform("LogBlockgraphBeforeDsa") { - def implementation(ctx: IRContext, analyses: AnalysisManager): Set[analyses.Memoizer] = { - config.foreach { - _.analysisDotPath.foreach { s => - DebugDumpIRLogger.writeToFile(File(s"${s}_blockgraph-before-dsa.dot"), dotBlockGraph(ctx.program.mainProcedure)) - } - } - Set.empty - } -} - -class LogIrBeforeDsa(config: BASILConfig) extends Transform("LogIrBeforeDsa") { - def implementation(ctx: IRContext, analyses: AnalysisManager): Set[analyses.Memoizer] = { - config.foreach { - _.dumpILToPath.foreach { s => - DebugDumpIRLogger.writeToFile(File(s"${s}_il-before-dsa.il"), pp_prog(ctx.program)) - } - } - Set.empty - } -} - -class LogBlockgraphAfterDsa(config: BASILConfig) extends Transform("LogBlockgraphAfterDsa") { - def implementation(ctx: IRContext, analyses: AnalysisManager): Set[analyses.Memoizer] = { - config.foreach { - _.analysisDotPath.foreach { s => - AnalysisResultDotLogger.writeToFile( - File(s"${s}_blockgraph-after-dsa.dot"), - dotBlockGraph( - program, - (program.collect { case b: Block => - b -> pp_block(b) - }).toMap - ) - ) - } - } - Set.empty - } -} - -class LogIrAfterDsa(config: BASILConfig) extends Transform("LogIrAfterDsa") { - def implementation(ctx: IRContext, analyses: 
AnalysisManager): Set[analyses.Memoizer] = { - config.foreach { - _.dumpILToPath.foreach { s => - DebugDumpIRLogger.writeToFile(File(s"${s}_il-after-dsa.il"), pp_prog(program)) - } - } - Set.empty - } -} - -class OnePassDsa extends Transform("OnePassDsa") { - def implementation(ctx: IRContext, analyses: AnalysisManager): Set[analyses.Memoizer] = { - transforms.OnePassDSA().applyTransform(ctx.program) - Set.empty - } -} - -class InlinePLTLaunchpad extends Transform("InlinePLTLaunchpad") { - def implementation(ctx: IRContext, analyses: AnalysisManager): Set[analyses.Memoizer] = { - transforms.inlinePLTLaunchpad(ctx.program) - Set.empty - } -} - -class LogDsaResultsAndDoChecks extends Transform("LogDsaResultsAndDoChecks") { - def implementation(ctx: IRContext, analyses: AnalysisManager): Set[analyses.Memoizer] = { - if (ir.eval.SimplifyValidation.validate) { - Logger.info("DSA no uninitialised") - assert(invariant.allVariablesAssignedIndex(ctx.program)) - // Logger.info("Live vars difftest") - // val tipLiveVars : Map[CFGPosition, Set[Variable]] = analysis.IntraLiveVarsAnalysis(program).analyze() - // assert(program.procedures.forall(transforms.difftestLiveVars(_, tipLiveVars))) - - Logger.info("DSA Check") - val x = program.procedures.forall(transforms.rdDSAProperty) - assert(x) - Logger.info("DSA Check passed") - assert(invariant.singleCallBlockEnd(program)) - assert(invariant.cfgCorrect(program)) - assert(invariant.blocksUniqueToEachProcedure(program)) - } - Set.empty - } -} - -class LogIrBeforeCopyProp extends Transform("LogIrBeforeCopyProp") { - def implementation(ctx: IRContext, analyses: AnalysisManager): Set[analyses.Memoizer] = { - config.foreach { - _.dumpILToPath.foreach { s => - DebugDumpIRLogger.writeToFile(File(s"${s}_il-before-copyprop.il"), pp_prog(ctx.program)) - } - } - Set.empty - } -} - -class LogBlockGraphBeforeCopyProp extends Transform("LogBlockGraphBeforeCopyProp") { - def implementation(ctx: IRContext, analyses: AnalysisManager): Set[analyses.Memoizer] = { - config.foreach { - _.analysisDotPath.foreach { s => - AnalysisResultDotLogger.writeToFile( - File(s"${s}_blockgraph-before-copyprop.dot"), - dotBlockGraph(ctx.program.mainProcedure) - ) - } - } - Set.empty - } -} - -class CopyProp extends Transform("CopyProp") { - def implementation(ctx: IRContext, analyses: AnalysisManager): Set[analyses.Memoizer] = { - Logger.info("Copyprop Start") - transforms.copyPropParamFixedPoint(ctx.program, ctx.globalOffsets) - Set.empty - } -} - -class FixUpGuards extends Transforms("FixUpGuards") { - def implementation(ctx: IRContext, analyses: AnalysisManager): Set[analyses.Memoizer] = { - transforms.fixupGuards(ctx.program) - Set.empty - } -} - -class RemoveDuplicateGuards extends Transforms("RemoveDuplicateGuards") { - def implementation(ctx: IRContext, analyses: AnalysisManager): Set[analyses.Memoizer] = { - transforms.removeDuplicateGuard(ctx.program) - Set.empty - } -} - -class LogBlockGraphAfterSimp extends Transform("LogBlockGraphAfterSimp") { - def implementation(ctx: IRContext, analyses: AnalysisManager): Set[analyses.Memoizer] = { - config.foreach { - _.analysisDotPath.foreach { s => - AnalysisResultDotLogger.writeToFile( - File(s"${s}_blockgraph-after-simp.dot"), - dotBlockGraph(program.mainProcedure) - ) - } - } - Set.empty - } -} - -class LiftLinuxAssertFail extends Transform("LogBlockGraphAfterSimp") { - def implementation(ctx: IRContext, analyses: AnalysisManager): Set[analyses.Memoizer] = { - transforms.liftLinuxAssertFail(ctx) - 
assert(invariant.blockUniqueLabels(ctx.program)) // ??? should this be here? - // (?) removed command: Logger.info(s"CopyProp ${timer.checkPoint("Simplify")} ms ") - Set.empty - } -} - -class LogIrAfterCopyProp extends Transform("LogBlockGraphAfterSimp") { - def implementation(ctx: IRContext, analyses: AnalysisManager): Set[analyses.Memoizer] = { - config.foreach { - _.dumpILToPath.foreach { s => - DebugDumpIRLogger.writeToFile(File(s"${s}_il-after-copyprop.il"), pp_prog(program)) - } - } - Set.empty - } -} - -class DsaCheck extends Transform("LogBlockGraphAfterSimp") { - def implementation(ctx: IRContext, analyses: AnalysisManager): Set[analyses.Memoizer] = { - if (ir.eval.SimplifyValidation.validate) { - Logger.info("DSA Check (after transform)") - val x = program.procedures.forall(transforms.rdDSAProperty) - assert(x) - Logger.info("DSA Check succeeded") - } - Set.empty - } -} - -class LogIrAfterSlices extends Transform("LogBlockGraphAfterSimp") { - def implementation(ctx: IRContext, analyses: AnalysisManager): Set[analyses.Memoizer] = { - config.foreach { - _.dumpILToPath.foreach { s => - DebugDumpIRLogger.writeToFile(File(s"${s}_il-after-slices.il"), pp_prog(program)) - } - } - Set.empty - } -} - -class LogSimplificationValidation extends Transform("LogSimplificationValidation") { - def implementation(ctx: IRContext, analyses: AnalysisManager): Set[analyses.Memoizer] = { - if (ir.eval.SimplifyValidation.validate) { - Logger.info("[!] Simplify :: Writing simplification validation") - val w = BufferedWriter(FileWriter("rewrites.smt2")) - ir.eval.SimplifyValidation.makeValidation(w) - w.close() - } - // (?) removed command: Logger.info("[!] Simplify :: finished") - Set.empty - } -} - - -// --- Static Analyses ------------------------------------------------------------------------------------------------- - -class IntraProcConstantPropagationAnalysis extends StaticAnalysis[ - Map[CFGPosition, Map[Variable, FlatElement[BitVecLiteral]]] -]("IntraProcConstantPropagation") { - def implementation(prog: Program, analyses: AnalysisManager) = IntraProcConstantPropagation(prog).analyze() -} - -class InterProcConstantPropagationAnalysis extends StaticAnalysis[ - Map[CFGPosition, Map[Variable, FlatElement[BitVecLiteral]]] -]("InterProcConstantPropagation") { - def implementation(prog: Program, analyses: AnalysisManager) = InterProcConstantPropagation(prog).analyze() -} - -class MemoryRegionAnalysisSolverAnalysis extends StaticAnalysis[ - Map[CFGPosition, ((Set[StackRegion], Set[Variable]), Set[HeapRegion])] -]("MemoryRegionAnalysis") { - def implementation(prog: Program, analyses: AnalysisManager) = - MemoryRegionAnalysisSolver( - prog, - analyses.intraDomain(), // computeDomain(IntraProcIRCursor, prog.procedures).toSet, - analyses.interProcConstProp(), - analyses.reachingDefs(), - analyses.graResult(), - analyses.mmmResults(), - analyses.vsaResult() - ).analyze() -} - -class ValueSetAnalysisSolverAnalysis extends StaticAnalysis[ - Map[CFGPosition, LiftedElement[Map[Variable | MemoryRegion, Set[Value]]]] -]("VsaAnalysis") { - def implementation(prog: Program) = - ValueSetAnalysisSolverAnalysis(prog, analyses.mmmResults()).analyze() -} - -class InterLiveVarsAnalysis extends StaticAnalysis[ - Map[CFGPosition, Map[Variable, TwoElement]], -]("InterLiveVarsAnalysis") { - def implementation(prog: Program, analyses: AnalysisManager) = InterLiveVarsAnalysis(prog).analyze() -} - -class ParamResultsAnalysis extends StaticAnalysis[ - Map[Procedure, Set[Variable]] -]("ParamResultsAnalysis") { - def 
implementation(prog: Program, analyses: AnalysisManager) = ParamAnalysis(prog).analyze() -} - -class SteensgaardGetSolver extends StaticAnalysis[ - Map[RegisterWrapperEqualSets, Set[RegisterWrapperEqualSets | MemoryRegion]] -]("SteensgaardGetSolver") { - def implementation(prog: Program, analyses: AnalysisManager) = - val solver = InterprocSteensgaardAnalysis( - analyses.interDomain(), // computeDomain(InterProcIRCursor, prog.procedures) - analyses.mmmResults(), - analyses.ssaResults() - ) - solver.analyze() - solver // ??? -} - -class SteensgaardGetPointsTo extends StaticAnalysis[ - Map[RegisterWrapperEqualSets, Set[RegisterWrapperEqualSets | MemoryRegion]] -]("SteensgaardGetPointsTo") { - def implementation(prog: Program, analyses: AnalysisManager) = analyses.steensgaardSolver().pointsTo() -} - -class SteensgaardGetCallSiteSummary extends StaticAnalysis[ - mutable.Map[DirectCall, Map[RegisterWrapperEqualSets, Set[RegisterWrapperEqualSets | MemoryRegion]]] -]("SteensgaardGetCallSiteSummary") { - def implementation(prog: Program, analyses: AnalysisManager) = analyses.steensgaardSolver().callSiteSummary -} - -class MmmResults extends StaticAnalysis[ - MemoryModelMap -]("MmmResults") { - def implementation(prog: Program, analyses: AnalysisManager) = - val mmm = MemoryModelMap( - globalOffsets, - mergedSubroutines, - globalAddresses, - globalSizes - ) - mmm.preLoadGlobals() - mmm.setCallSiteSummaries(steensgaardSolver.callSiteSummary) - mmm - -} - -class reachingDefs extends StaticAnalysis("ReachingDefs") { - ReturnType = Map[CFGPosition, (Map[Variable, Set[Assign]], Map[Variable, Set[Assign]])] - def implementation(prog: Program, analyses: AnalysisManager) = _(prog).analyze() -} - -class regionInjector extends StaticAnalysis[Option[RegionInjector]]("RegionInjector") { - def implementation(prog: Program, analyses: AnalysisManager) = _(prog).analyze() -} - -class symbolicAddresses extends StaticAnalysis[Map[CFGPosition, Map[SymbolicAddress, TwoElement]]]("SymbolicAddresses") { - def implementation(prog: Program, analyses: AnalysisManager) = _(prog).analyze() -} - -class localDSA extends StaticAnalysis[Map[Procedure, Graph]]("LocalDSA") { - def implementation(prog: Program, analyses: AnalysisManager) = _(prog).analyze() -} - -class bottomUpDSA extends StaticAnalysis[Map[Procedure, Graph]]("BottomUpDSA") { - def implementation(prog: Program, analyses: AnalysisManager) = _(prog).analyze() -} - -class topDownDSA extends StaticAnalysis[Map[Procedure, Graph]]("TopDownDSA") { - def implementation(prog: Program, analyses: AnalysisManager) = _(prog).analyze() -} - -class writesToResult extends StaticAnalysis[Map[Procedure, Set[Register]]]("WritesTo") { - def implementation(prog: Program, analyses: AnalysisManager) = _(prog).analyze() -} - -class ssaResults extends StaticAnalysis[ - Map[CFGPosition, (Map[Variable, FlatElement[Int]], Map[Variable, FlatElement[Int]])] -]("SSA") { - def implementation(prog: Program, analyses: AnalysisManager) = _(prog).analyze() -} - - */ diff --git a/src/test/scala/ConditionLiftingTests.scala b/src/test/scala/ConditionLiftingTests.scala index a9ca36a926..067d5b511f 100644 --- a/src/test/scala/ConditionLiftingTests.scala +++ b/src/test/scala/ConditionLiftingTests.scala @@ -3,7 +3,6 @@ import org.scalatest.matchers.should.Matchers import scala.util.{Try, Failure, Success} import java.io.OutputStream import translating.PrettyPrinter.* -import util.IRTransform import analysis.AnalysisManager import ir.* @@ -825,7 +824,8 @@ class ConditionLiftingRegressionTest extends 
AnyFunSuite with test_util.CaptureO test("conds inline test") { var ctx = util.IRLoading.load(testProgram) - IRTransform.DoCleanup(true)(ctx, AnalysisManager(ctx.program)) + + ir.transforms.doCleanupWithSimplify(ctx, AnalysisManager(ctx.program)) ir.transforms.clearParams(ctx.program) ctx = ir.transforms.liftProcedureCallAbstraction(ctx) util.RunUtils.doSimplify(ctx, None) diff --git a/src/test/scala/DifferentialAnalysisTest.scala b/src/test/scala/DifferentialAnalysisTest.scala index 1a2dda13b3..46a7cf9cdf 100644 --- a/src/test/scala/DifferentialAnalysisTest.scala +++ b/src/test/scala/DifferentialAnalysisTest.scala @@ -3,7 +3,7 @@ import ir.eval.* import java.io.File import org.scalatest.* import org.scalatest.funsuite.* -import util.{IRLoading, ILLoadingConfig, IRContext, RunUtils, StaticAnalysisConfig, Logger, LogLevel, IRTransform} +import util.{IRLoading, ILLoadingConfig, IRContext, RunUtils, StaticAnalysisConfig, Logger, LogLevel} import ir.eval.ExecEffect import test_util.* import analysis.AnalysisManager @@ -90,10 +90,10 @@ abstract class DifferentialTest extends AnyFunSuite, CaptureOutput, TestCustomis ) var ictx = IRLoading.load(loading) - IRTransform.DoCleanup()(ictx, AnalysisManager(ictx.program)) + ir.transforms.doCleanupWithoutSimplify(ictx, AnalysisManager(ictx.program)) var comparectx = IRLoading.load(loading) - IRTransform.DoCleanup()(comparectx, AnalysisManager(comparectx.program)) + ir.transforms.doCleanupWithoutSimplify(comparectx, AnalysisManager(comparectx.program)) ir.transforms.clearParams(ictx.program) diff --git a/src/test/scala/InterpretTestConstProp.scala b/src/test/scala/InterpretTestConstProp.scala index 918456e7f3..c9a87f7446 100644 --- a/src/test/scala/InterpretTestConstProp.scala +++ b/src/test/scala/InterpretTestConstProp.scala @@ -13,7 +13,6 @@ import util.{ ILLoadingConfig, IRContext, IRLoading, - IRTransform, LogLevel, Logger, RunUtils, @@ -55,7 +54,7 @@ class InterpretTestConstProp val loading = ILLoadingConfig(inputFile = s"$path.adt", relfFile = s"$path.relf", dumpIL = None) val ictx = IRLoading.load(loading) - IRTransform.DoCleanup()(ictx, AnalysisManager(ictx.program)) + ir.transforms.doCleanupWithoutSimplify(ictx, AnalysisManager(ictx.program)) ir.transforms.clearParams(ictx.program) val analyses = RunUtils.staticAnalysis(StaticAnalysisConfig(None, None, None), ictx) diff --git a/src/test/scala/TestKnownBitsInterpreter.scala b/src/test/scala/TestKnownBitsInterpreter.scala index d9204a8622..ee6a845ac9 100644 --- a/src/test/scala/TestKnownBitsInterpreter.scala +++ b/src/test/scala/TestKnownBitsInterpreter.scala @@ -19,8 +19,7 @@ import util.{ StaticAnalysisContext, BASILResult, Logger, - LogLevel, - IRTransform + LogLevel } import ir.eval.{interpretTrace, interpret, ExecEffect, Stopped} import ir.dsl diff --git a/src/test/scala/ir/InterpreterTests.scala b/src/test/scala/ir/InterpreterTests.scala index 7bf3526e8b..b8cc9cf185 100644 --- a/src/test/scala/ir/InterpreterTests.scala +++ b/src/test/scala/ir/InterpreterTests.scala @@ -12,7 +12,7 @@ import boogie.SpecGlobal import translating.BAPToIR import util.{LogLevel, Logger} import util.IRLoading.{loadBAP, loadReadELF} -import util.{ILLoadingConfig, IRContext, IRLoading, IRTransform} +import util.{ILLoadingConfig, IRContext, IRLoading} import test_util.{BASILTest, CaptureOutput} import ir.dsl.given import ir.dsl.IfThenBlocks @@ -50,7 +50,7 @@ class InterpreterTests extends AnyFunSuite with CaptureOutput with BeforeAndAfte ) val ctx = IRLoading.load(loading) - IRTransform.DoCleanup()(ctx, 
AnalysisManager(ctx.program)) + ir.transforms.doCleanupWithoutSimplify(ctx, AnalysisManager(ctx.program)) ir.transforms.clearParams(ctx.program) // val bapProgram = loadBAP(loading.inputFile) From 7c839c651854238819354e8231022bdd08aaa676 Mon Sep 17 00:00:00 2001 From: James Tobler <64625414+j-tobler@users.noreply.github.com> Date: Fri, 20 Jun 2025 15:38:00 +1000 Subject: [PATCH 10/30] fixed compilation errors; fixed logic errors in transform: prepare for translation --- .../ir/transforms/PrepareForTranslation.scala | 15 +++++------ src/main/scala/ir/transforms/Simp.scala | 2 +- src/main/scala/util/RunUtils.scala | 25 ++++++++++++++----- 3 files changed, 28 insertions(+), 14 deletions(-) diff --git a/src/main/scala/ir/transforms/PrepareForTranslation.scala b/src/main/scala/ir/transforms/PrepareForTranslation.scala index 797cbef377..df4672ceff 100644 --- a/src/main/scala/ir/transforms/PrepareForTranslation.scala +++ b/src/main/scala/ir/transforms/PrepareForTranslation.scala @@ -1,7 +1,8 @@ package ir.transforms import scala.collection.mutable -import ir.{StackSubstituter, Renamer, Block} +import ir.Block +import ir.cilvisitor.* import ir.invariant import util.BASILConfig import analysis.AnalysisManager @@ -14,7 +15,7 @@ val determineRelevantMemory = SingleTransform("DetermineRelevantMemory", (ctx, m // run iff arg val stackSubstitution = SingleTransform("StackSubstitution", (ctx, man) => { - StackSubstituter().visitProgram(ctx.program) + visit_prog(StackSubstituter(), ctx.program) man.ClobberAll }) @@ -26,19 +27,19 @@ val setModifies = SingleTransform("SetModifies", (ctx, man) => { def getRenameBoogieKeywordsTransform(boogieReserved: Set[String]): Transform = SingleTransform("RenameBoogieKeywords", (ctx, man) => { - Renamer(boogieReserved).visitProgram(ctx.program) + visit_prog(BoogieReservedRenamer(boogieReserved), ctx.program) man.ClobberAll }) /** Cull unneccessary information that does not need to be included in the translation, and infer stack regions, and * add in modifies from the spec. */ -def getPrepareForTranslationTransform(config: BASILConfig, boogieReserved: Set[String]): Transform = TransformBatch( +def getPrepareForTranslationTransform(trimDepth: Int, boogieReserved: Set[String]): Transform = TransformBatch( "PrepareForTranslation", List( - determineRelevantMemory, // run iff config.staticAnalysis.isEmpty || (config.staticAnalysis.get.memoryRegions == MemoryRegionsMode.Disabled) - getStripUnreachableFunctionsTransform(config.loading.procedureTrimDepth), - stackSubstitution, // run iff !config.memoryTransform && (config.staticAnalysis.isEmpty || (config.staticAnalysis.get.memoryRegions == MemoryRegionsMode.Disabled)) + determineRelevantMemory, + getStripUnreachableFunctionsTransform(trimDepth), + stackSubstitution, setModifies, getRenameBoogieKeywordsTransform(boogieReserved: Set[String]) ), diff --git a/src/main/scala/ir/transforms/Simp.scala b/src/main/scala/ir/transforms/Simp.scala index 3ba64fe842..4360685747 100644 --- a/src/main/scala/ir/transforms/Simp.scala +++ b/src/main/scala/ir/transforms/Simp.scala @@ -1931,7 +1931,7 @@ def getDoCleanupTransform(doSimplify: Boolean): Transform = TransformBatch( getReplaceReturnsTransform(doSimplify), removeExternalFunctionReferences ), - notice = "Removing external function calls", // fixme: seems odd? + notice = "Removing external function calls", // fixme: is this all the cleanup is doing? 
postRunChecks = ctx => { assert(invariant.singleCallBlockEnd(ctx.program)) assert(invariant.cfgCorrect(ctx.program)) diff --git a/src/main/scala/util/RunUtils.scala b/src/main/scala/util/RunUtils.scala index 47859fadde..5eb34367db 100644 --- a/src/main/scala/util/RunUtils.scala +++ b/src/main/scala/util/RunUtils.scala @@ -716,20 +716,33 @@ object RunUtils { assert(invariant.cfgCorrect(ctx.program)) assert(invariant.blocksUniqueToEachProcedure(ctx.program)) + // derive transform configuration from basil config val analysisManager = AnalysisManager(ctx.program) + val disabledTransforms: mutable.Set[Transform] = mutable.Set.empty + val transformsToLog: mutable.Map[Transform, Set[Log]] = mutable.Map.empty + + if (!q.staticAnalysis.isEmpty && q.staticAnalysis.get.memoryRegions != MemoryRegionsMode.Disabled) { + disabledTransforms += determineRelevantMemory + } + if (q.memoryTransform || (!q.staticAnalysis.isEmpty && q.staticAnalysis.get.memoryRegions != MemoryRegionsMode.Disabled)) { + disabledTransforms += stackSubstitution + } + + // note: we might not necessarily want the same config for all transforms + val transformConfig = TransformConfig(disabled = disabledTransforms.toSet, dumpLogs = transformsToLog.toMap) // these transforms depend on basil config parameters and thus need to be constructed here - val prepareForTranslation = getPrepareForTranslationTransform(q, Set("free")) + val prepareForTranslation = getPrepareForTranslationTransform(q.loading.procedureTrimDepth, Set("free")) val genProcSummaries = getGenerateProcedureSummariesTransform(q.loading.parameterForm || conf.simplify) val genRgConditions = getGenerateRgConditionsTransform(ctx.program.procedures.toList.filter(_.returnBlock != None)) val stripUnreachableFunctions = getStripUnreachableFunctionsTransform(q.loading.procedureTrimDepth) - if conf.simplify then doCleanupWithSimplify(ctx, analysisManager) - else doCleanupWithoutSimplify(ctx, analysisManager) + if conf.simplify then doCleanupWithSimplify(ctx, analysisManager, transformConfig) + else doCleanupWithoutSimplify(ctx, analysisManager, transformConfig) - transforms.inlinePLTLaunchpad(ctx, analysisManager) + transforms.inlinePLTLaunchpad(ctx, analysisManager, transformConfig) - if (q.loading.trimEarly) then stripUnreachableFunctions(ctx, analysisManager) + if (q.loading.trimEarly) then stripUnreachableFunctions(ctx, analysisManager, transformConfig) // todo: since refactoring, there is some extra code that is run here // see StripUnreachableFunctions.getStripUnreachableFunctionsTransform @@ -828,7 +841,7 @@ object RunUtils { } } - prepareForTranslation(ctx, analysisManager) + prepareForTranslation(ctx, analysisManager, transformConfig) if (conf.generateRelyGuarantees) then genRgConditions(ctx, analysisManager) From a502e3fa39292e01b3ceb0cc8b62f38a59780673 Mon Sep 17 00:00:00 2001 From: James Tobler <64625414+j-tobler@users.noreply.github.com> Date: Fri, 20 Jun 2025 17:09:00 +1000 Subject: [PATCH 11/30] made some transforms lazy to fix nullptr exceptions - might change this in future --- src/main/scala/ir/transforms/Simp.scala | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/src/main/scala/ir/transforms/Simp.scala b/src/main/scala/ir/transforms/Simp.scala index 4360685747..a8a08e1fb1 100644 --- a/src/main/scala/ir/transforms/Simp.scala +++ b/src/main/scala/ir/transforms/Simp.scala @@ -1941,8 +1941,8 @@ def getDoCleanupTransform(doSimplify: Boolean): Transform = TransformBatch( ) // these are called a lot so it's useful to create them here rather than 
generating many copies on the fly -val doCleanupWithSimplify = getDoCleanupTransform(true) -val doCleanupWithoutSimplify = getDoCleanupTransform(false) +lazy val doCleanupWithSimplify = getDoCleanupTransform(true) +lazy val doCleanupWithoutSimplify = getDoCleanupTransform(false) class Simplify(val res: Boolean => Variable => Option[Expr], val initialBlock: Block = null) extends CILVisitor { From 44eed4509e09ede4e8a8b15384ac3d24f38d0e4d Mon Sep 17 00:00:00 2001 From: James Tobler <64625414+j-tobler@users.noreply.github.com> Date: Tue, 24 Jun 2025 11:20:17 +1000 Subject: [PATCH 12/30] scalafmt --- .../ir/transforms/InlinePLTLaunchpad.scala | 33 ++++---- .../ir/transforms/PrepareForTranslation.scala | 46 +++++++---- .../scala/ir/transforms/ReplaceReturn.scala | 11 ++- src/main/scala/ir/transforms/Simp.scala | 79 +++++++++++-------- src/main/scala/ir/transforms/Transform.scala | 11 +-- src/main/scala/util/RunUtils.scala | 6 +- src/test/scala/DifferentialAnalysisTest.scala | 2 +- 7 files changed, 110 insertions(+), 78 deletions(-) diff --git a/src/main/scala/ir/transforms/InlinePLTLaunchpad.scala b/src/main/scala/ir/transforms/InlinePLTLaunchpad.scala index 138ce40e4a..41815a1605 100644 --- a/src/main/scala/ir/transforms/InlinePLTLaunchpad.scala +++ b/src/main/scala/ir/transforms/InlinePLTLaunchpad.scala @@ -2,22 +2,25 @@ package ir.transforms import ir.{Program, Procedure} import ir.cilvisitor.visit_prog -val inlinePLTLaunchpad = SingleTransform("InlinePLTLaunchpad", (ctx, man) => { - ctx.program.sortProceduresRPO() +val inlinePLTLaunchpad = SingleTransform( + "InlinePLTLaunchpad", + (ctx, man) => { + ctx.program.sortProceduresRPO() - def candidate(p: Procedure): Boolean = - (p.blocks.size <= 4) - && p.calls.size == 1 - && p.calls.forall(_.isExternal.contains(true)) - && p.procName.startsWith("FUN") - && !p.calls.contains(p) + def candidate(p: Procedure): Boolean = + (p.blocks.size <= 4) + && p.calls.size == 1 + && p.calls.forall(_.isExternal.contains(true)) + && p.procName.startsWith("FUN") + && !p.calls.contains(p) - for (p <- ctx.program.procedures.reverse.filter(candidate)) { - p.incomingCalls().foreach { call => - inlineCall(ctx.program, call) + for (p <- ctx.program.procedures.reverse.filter(candidate)) { + p.incomingCalls().foreach { call => + inlineCall(ctx.program, call) + } } - } - applyRPO(ctx.program) - man.ClobberAll -}) + applyRPO(ctx.program) + man.ClobberAll + } +) diff --git a/src/main/scala/ir/transforms/PrepareForTranslation.scala b/src/main/scala/ir/transforms/PrepareForTranslation.scala index df4672ceff..21308be8fa 100644 --- a/src/main/scala/ir/transforms/PrepareForTranslation.scala +++ b/src/main/scala/ir/transforms/PrepareForTranslation.scala @@ -8,28 +8,40 @@ import util.BASILConfig import analysis.AnalysisManager // run iff arg.isEmpty || (arg.get.memoryRegions == MemoryRegionsMode.Disabled) -val determineRelevantMemory = SingleTransform("DetermineRelevantMemory", (ctx, man) => { - ctx.program.determineRelevantMemory(ctx.globalOffsets) - man.ClobberAll -}) +val determineRelevantMemory = SingleTransform( + "DetermineRelevantMemory", + (ctx, man) => { + ctx.program.determineRelevantMemory(ctx.globalOffsets) + man.ClobberAll + } +) // run iff arg -val stackSubstitution = SingleTransform("StackSubstitution", (ctx, man) => { - visit_prog(StackSubstituter(), ctx.program) - man.ClobberAll -}) +val stackSubstitution = SingleTransform( + "StackSubstitution", + (ctx, man) => { + visit_prog(StackSubstituter(), ctx.program) + man.ClobberAll + } +) -val setModifies = 
SingleTransform("SetModifies", (ctx, man) => { - val specModifies = ctx.specification.subroutines.map(s => s.name -> s.modifies).toMap - ctx.program.setModifies(specModifies) - man.ClobberAll -}) +val setModifies = SingleTransform( + "SetModifies", + (ctx, man) => { + val specModifies = ctx.specification.subroutines.map(s => s.name -> s.modifies).toMap + ctx.program.setModifies(specModifies) + man.ClobberAll + } +) def getRenameBoogieKeywordsTransform(boogieReserved: Set[String]): Transform = - SingleTransform("RenameBoogieKeywords", (ctx, man) => { - visit_prog(BoogieReservedRenamer(boogieReserved), ctx.program) - man.ClobberAll - }) + SingleTransform( + "RenameBoogieKeywords", + (ctx, man) => { + visit_prog(BoogieReservedRenamer(boogieReserved), ctx.program) + man.ClobberAll + } + ) /** Cull unneccessary information that does not need to be included in the translation, and infer stack regions, and * add in modifies from the spec. diff --git a/src/main/scala/ir/transforms/ReplaceReturn.scala b/src/main/scala/ir/transforms/ReplaceReturn.scala index 99fdc56740..2465eaca2e 100644 --- a/src/main/scala/ir/transforms/ReplaceReturn.scala +++ b/src/main/scala/ir/transforms/ReplaceReturn.scala @@ -143,7 +143,10 @@ def establishProcedureDiamondForm(program: Program, doSimplify: Boolean = false) } def getEstablishProcedureDiamondFormTransform(doSimplify: Boolean): Transform = - SingleTransform("EstablishProcedureDiamondForm", (ctx, man) => { - establishProcedureDiamondForm(ctx.program, doSimplify) - man.ClobberAll - }) + SingleTransform( + "EstablishProcedureDiamondForm", + (ctx, man) => { + establishProcedureDiamondForm(ctx.program, doSimplify) + man.ClobberAll + } + ) diff --git a/src/main/scala/ir/transforms/Simp.scala b/src/main/scala/ir/transforms/Simp.scala index ad7cfa8793..8e75025b54 100644 --- a/src/main/scala/ir/transforms/Simp.scala +++ b/src/main/scala/ir/transforms/Simp.scala @@ -829,15 +829,21 @@ def coalesceBlocks(p: Program): Boolean = { didAny } -val coalesceBlocksOnce = SingleTransform("CoalesceBlocksOnce", (ctx, man) => { - coalesceBlocks(ctx.program) - man.ClobberAll -}) +val coalesceBlocksOnce = SingleTransform( + "CoalesceBlocksOnce", + (ctx, man) => { + coalesceBlocks(ctx.program) + man.ClobberAll + } +) -val coalesceBlocksFixpoint = SingleTransform("CoalesceBlocksFixpoint", (ctx, man) => { - while (coalesceBlocks(ctx.program)) {} - man.ClobberAll -}) +val coalesceBlocksFixpoint = SingleTransform( + "CoalesceBlocksFixpoint", + (ctx, man) => { + while (coalesceBlocks(ctx.program)) {} + man.ClobberAll + } +) def removeDeadInParams(p: Program): Boolean = { var modified = false @@ -1149,10 +1155,13 @@ def applyRPO(p: Program) = { } } -val applyRpoTransform = SingleTransform("ApplyRPO", (ctx, man) => { - applyRPO(man.program) - man.ClobberAll -}) +val applyRpoTransform = SingleTransform( + "ApplyRPO", + (ctx, man) => { + applyRPO(man.program) + man.ClobberAll + } +) object getProcFrame { class GetProcFrame(frames: Procedure => Set[Memory]) extends CILVisitor { @@ -1887,27 +1896,33 @@ def findDefinitelyExits(p: Program): ProcReturnInfo = { ) } -val replaceJumpsInNonReturningProcs = SingleTransform("ReplaceJumpsInNonReturningProcs", (ctx, man) => { - val nonReturning = findDefinitelyExits(ctx.program) - ctx.program.mainProcedure.foreach { - case d: DirectCall if nonReturning.nonreturning.contains(d.target) => d.parent.replaceJump(Return()) - case _ => - } - man.ClobberAll -}) - -val removeExternalFunctionReferences = SingleTransform("RemoveExternalFunctionReferences", (ctx, man) => { 
- val externalNames = ctx.externalFunctions.map(_.name) - val unqualifiedNames = externalNames.filter(_.contains('@')).map(_.split('@')(0)) - removeBodyOfExternal(externalNames ++ unqualifiedNames)(ctx.program) - for (p <- ctx.program.procedures) { - p.isExternal = Some( - ctx.externalFunctions.exists(e => e.name == p.procName || p.address.contains(e.offset)) || p.isExternal - .getOrElse(false) - ) +val replaceJumpsInNonReturningProcs = SingleTransform( + "ReplaceJumpsInNonReturningProcs", + (ctx, man) => { + val nonReturning = findDefinitelyExits(ctx.program) + ctx.program.mainProcedure.foreach { + case d: DirectCall if nonReturning.nonreturning.contains(d.target) => d.parent.replaceJump(Return()) + case _ => + } + man.ClobberAll + } +) + +val removeExternalFunctionReferences = SingleTransform( + "RemoveExternalFunctionReferences", + (ctx, man) => { + val externalNames = ctx.externalFunctions.map(_.name) + val unqualifiedNames = externalNames.filter(_.contains('@')).map(_.split('@')(0)) + removeBodyOfExternal(externalNames ++ unqualifiedNames)(ctx.program) + for (p <- ctx.program.procedures) { + p.isExternal = Some( + ctx.externalFunctions.exists(e => e.name == p.procName || p.address.contains(e.offset)) || p.isExternal + .getOrElse(false) + ) + } + man.ClobberAll } - man.ClobberAll -}) +) def getDoCleanupTransform(doSimplify: Boolean): Transform = TransformBatch( "DoCleanup", diff --git a/src/main/scala/ir/transforms/Transform.scala b/src/main/scala/ir/transforms/Transform.scala index 93695826a0..1c9719a6fe 100644 --- a/src/main/scala/ir/transforms/Transform.scala +++ b/src/main/scala/ir/transforms/Transform.scala @@ -7,7 +7,6 @@ import analysis.AnalysisManager import translating.PrettyPrinter.pp_prog import java.io.File - // TransformConfig accepts Log instances which specify what kind of logs to dump for particular transforms trait Log { def dump(ctx: IRContext, transformName: String): Unit @@ -18,7 +17,8 @@ case class BlockgraphLog(filenamePrefix: String) extends Log { def dump(ctx: IRContext, transformName: String): Unit = DebugDumpIRLogger.writeToFile( File(s"${filenamePrefix}_blockgraph-${transformName}.dot"), - dotBlockGraph(ctx.program.mainProcedure)) + dotBlockGraph(ctx.program.mainProcedure) + ) } // dumps an IR log @@ -32,10 +32,7 @@ case class IrLog(filenamePrefix: String) extends Log { * @param disabled Optionally specify a set of transforms to disable. * @param dumpLogs Optionally specify which logs to dump for which transforms, if any. 
*/ -case class TransformConfig( - disabled: Set[Transform] = Set.empty, - dumpLogs: Map[Transform, Set[Log]] = Map.empty -) +case class TransformConfig(disabled: Set[Transform] = Set.empty, dumpLogs: Map[Transform, Set[Log]] = Map.empty) // default value for transforms val emptyConfig = TransformConfig() @@ -93,7 +90,7 @@ case class SingleTransform( // simply calls the given implementation function def transform(ctx: IRContext, man: AnalysisManager, config: TransformConfig): man.Invalidation = implementation(ctx, man) - + // standard transforms don't need anything here; post-run checks should be handled by the implementation def postRun(ctx: IRContext): Unit = () } diff --git a/src/main/scala/util/RunUtils.scala b/src/main/scala/util/RunUtils.scala index 52a58a42dd..7ee7b96f9f 100644 --- a/src/main/scala/util/RunUtils.scala +++ b/src/main/scala/util/RunUtils.scala @@ -734,10 +734,12 @@ object RunUtils { if (!q.staticAnalysis.isEmpty && q.staticAnalysis.get.memoryRegions != MemoryRegionsMode.Disabled) { disabledTransforms += determineRelevantMemory } - if (q.memoryTransform || (!q.staticAnalysis.isEmpty && q.staticAnalysis.get.memoryRegions != MemoryRegionsMode.Disabled)) { + if ( + q.memoryTransform || (!q.staticAnalysis.isEmpty && q.staticAnalysis.get.memoryRegions != MemoryRegionsMode.Disabled) + ) { disabledTransforms += stackSubstitution } - + // note: we might not necessarily want the same config for all transforms val transformConfig = TransformConfig(disabled = disabledTransforms.toSet, dumpLogs = transformsToLog.toMap) diff --git a/src/test/scala/DifferentialAnalysisTest.scala b/src/test/scala/DifferentialAnalysisTest.scala index 6cc9532131..7b470f2703 100644 --- a/src/test/scala/DifferentialAnalysisTest.scala +++ b/src/test/scala/DifferentialAnalysisTest.scala @@ -93,7 +93,7 @@ abstract class DifferentialTest extends AnyFunSuite, CaptureOutput, TestCustomis ir.transforms.doCleanupWithoutSimplify(ictx, AnalysisManager(ictx.program)) var comparectx = IRLoading.load(loading) - ir.transforms.doCleanupWithoutSimplify(comparectx, AnalysisManager(comparectx.program)) + ir.transforms.doCleanupWithoutSimplify(comparectx, AnalysisManager(comparectx.program)) ir.transforms.clearParams(ictx.program) From c5922a6bd1fb85115ee290d454f03877cd35a3fd Mon Sep 17 00:00:00 2001 From: James Tobler <64625414+j-tobler@users.noreply.github.com> Date: Tue, 24 Jun 2025 15:16:02 +1000 Subject: [PATCH 13/30] added (tentative) transform for doSimplify --- src/main/scala/ir/transforms/Simp.scala | 248 +++++++++++++++++++++++- 1 file changed, 247 insertions(+), 1 deletion(-) diff --git a/src/main/scala/ir/transforms/Simp.scala b/src/main/scala/ir/transforms/Simp.scala index 8e75025b54..4d79b6be60 100644 --- a/src/main/scala/ir/transforms/Simp.scala +++ b/src/main/scala/ir/transforms/Simp.scala @@ -1,7 +1,7 @@ package ir.transforms import translating.PrettyPrinter.* import specification.FuncEntry -import util.SimplifyLogger +import util.{SimplifyLogger, Logger} import ir.eval.AlgebraicSimplifications import ir.eval.AssumeConditionSimplifications import ir.eval.simplifyExprFixpoint @@ -15,6 +15,7 @@ import scala.util.{Failure, Success} import ExecutionContext.Implicits.global import scala.util.boundary, boundary.break import util.IRContext +import java.io.{BufferedWriter, FileWriter} /** Simplification pass, see also: docs/development/simplification-solvers.md */ @@ -2175,3 +2176,248 @@ val makeProcEntriesNonLoops = SingleTransform( man.ClobberAll } ) + +// --- DoSimplify 
------------------------------------------------------------------------------------------------------ + +/* For the doSimplify transform, I've put all the relevant sub-transforms here. We might want to move these in +future, or merge them with the functions they call. */ + +val reduceLoops = SingleTransform( + "ReduceLoops", + (ctx, man) => { + val foundLoops = LoopDetector.identify_loops(ctx.program) + val newLoops = foundLoops.reducibleTransformIR() + newLoops.updateIrWithLoops() + man.ClobberAll + } +) + +val normaliseBlockNamesTransform = SingleTransform( + "NormaliseBlockNames", + (ctx, man) => { + for (p <- ctx.program.procedures) { + p.normaliseBlockNames() + } + man.ClobberAll + } +) + +val sortProceduresRpoTransform = SingleTransform( + "NormaliseBlockNames", + (ctx, man) => { + ctx.program.sortProceduresRPO() + man.ClobberAll + } +) + +val liftSvCompTransform = SingleTransform( + "LiftSvComp", + (ctx, man) => { + liftSVComp(ctx.program) + man.ClobberAll + } +) + +val removeEmptyBlocksTransform = SingleTransform( + "RemoveEmptyBlocks", + (ctx, man) => { + removeEmptyBlocks(ctx.program) + man.ClobberAll + } +) + +val onePassDsaTransform = SingleTransform( + "OnePassDsa", + (ctx, man) => { + OnePassDSA().applyTransform(ctx.program) + man.ClobberAll + } +) + +// fixme: this is not really a transform, but a check on the ir +val dsaCheck = SingleTransform( + "DsaCheck", + (ctx, man) => { + Logger.info("DSA no uninitialised") + assert(invariant.allVariablesAssignedIndex(ctx.program)) + // Logger.info("Live vars difftest") + // val tipLiveVars : Map[CFGPosition, Set[Variable]] = analysis.IntraLiveVarsAnalysis(ctx.program).analyze() + // assert(ctx.program.procedures.forall(transforms.difftestLiveVars(_, tipLiveVars))) + + Logger.info("DSA Check") + val x = ctx.program.procedures.forall(rdDSAProperty) + assert(x) + Logger.info("DSA Check passed") + assert(invariant.singleCallBlockEnd(ctx.program)) + assert(invariant.cfgCorrect(ctx.program)) + assert(invariant.blocksUniqueToEachProcedure(ctx.program)) + man.PreserveAll + } +) + +// fixme: similar issues to the above transform +val dsaCheckAfterTransform = SingleTransform( + "DsaCheckAfterTransform", + (ctx, man) => { + Logger.info("DSA Check (after transform)") + val x = ctx.program.procedures.forall(rdDSAProperty) + assert(x) + Logger.info("DSA Check succeeded") + man.PreserveAll + } +) + +// fixme: similar issues to the above +// we might want to move this out of the transform, to the callsite +val logSimplificationValidation = SingleTransform( + "LogSimplificationValidation", + (ctx, man) => { + Logger.info("[!] 
Simplify :: Writing simplification validation") + val w = BufferedWriter(FileWriter("rewrites.smt2")) + ir.eval.SimplifyValidation.makeValidation(w) + w.close() + man.PreserveAll + } +) + + +val copyPropParamFixedPointTransform = SingleTransform( + "CopyPropParamFixedPoint", + (ctx, man) => { + copyPropParamFixedPoint(ctx.program, ctx.globalOffsets) + man.ClobberAll + }, + notice = "Copyprop Start" +) + +val fixupGuardsTransform = SingleTransform( + "FixUpGuards", + (ctx, man) => { + fixupGuards(ctx.program) + man.ClobberAll + } +) + +val removeDuplicateGuardsTransform = SingleTransform( + "RemoveDuplicateGuards", + (ctx, man) => { + removeDuplicateGuard(ctx.program) + man.ClobberAll + } +) + +val liftLinuxAssertFailTransform = SingleTransform( + "LiftLinuxAssertFail", + (ctx, man) => { + liftLinuxAssertFail(ctx) + man.ClobberAll + } +) + +def getDoSimplifyTransform(validate: Boolean) = TransformBatch( + "DoSimplify", + List( + reduceLoops, + normaliseBlockNamesTransform, + sortProceduresRpoTransform, + liftSvCompTransform, + /* + config.foreach { + _.dumpILToPath.foreach { s => + DebugDumpIRLogger.writeToFile(File(s"${s}_il-before-simp.il"), pp_prog(program)) + } + } + */ + applyRpoTransform, // (this transform was already defined in this file) + // example of printing a simple analysis + removeEmptyBlocksTransform, + coalesceBlocksOnce, + removeEmptyBlocksTransform, + // transforms.coalesceBlocksCrossBranchDependency(program) + /* + config.foreach { + _.analysisDotPath.foreach { s => + DebugDumpIRLogger.writeToFile(File(s"${s}_blockgraph-before-dsa.dot"), dotBlockGraph(program.mainProcedure)) + } + } + Logger.info("[!] Simplify :: DynamicSingleAssignment") + config.foreach { + _.dumpILToPath.foreach { s => + DebugDumpIRLogger.writeToFile(File(s"${s}_il-before-dsa.il"), pp_prog(program)) + } + } + */ + onePassDsaTransform, + inlinePLTLaunchpad, + removeEmptyBlocksTransform, + /* + config.foreach { + _.analysisDotPath.foreach { s => + AnalysisResultDotLogger.writeToFile( + File(s"${s}_blockgraph-after-dsa.dot"), + dotBlockGraph( + program, + (program.collect { case b: Block => + b -> pp_block(b) + }).toMap + ) + ) + } + } + config.foreach { + _.dumpILToPath.foreach { s => + DebugDumpIRLogger.writeToFile(File(s"${s}_il-after-dsa.il"), pp_prog(program)) + } + } + */ + // todo: only run iff ir.eval.SimplifyValidation.validate (that is: iff conf.validateSimp) + dsaCheck, + // todo: + // if config is set and config.dumpILToPath is set then dump il + // if config is set and config.analysisDotPath is set then dump blockgraph + copyPropParamFixedPointTransform, + fixupGuardsTransform, + removeDuplicateGuardsTransform, + /* + config.foreach { + _.analysisDotPath.foreach { s => + AnalysisResultDotLogger.writeToFile( + File(s"${s}_blockgraph-after-simp.dot"), + dotBlockGraph(program.mainProcedure) + ) + } + } + */ + liftLinuxAssertFailTransform, + // assert(program.procedures.forall(transforms.rdDSAProperty)) + // todo: transforms should have the ability to log their performance as soon as they finish + /* + assert(invariant.blockUniqueLabels(program)) + Logger.info(s"CopyProp ${timer.checkPoint("Simplify")} ms ") + + config.foreach { + _.dumpILToPath.foreach { s => + DebugDumpIRLogger.writeToFile(File(s"${s}_il-after-copyprop.il"), pp_prog(program)) + } + } + */ + // val x = program.procedures.forall(transforms.rdDSAProperty) + // assert(x) + dsaCheckAfterTransform, // todo: only run iff conf.validateSimp + // run this after cond recovery because sign bit calculations often need high bits + // which 
go away in high level conss + /* + config.foreach { + _.dumpILToPath.foreach { s => + DebugDumpIRLogger.writeToFile(File(s"${s}_il-after-slices.il"), pp_prog(program)) + } + } + */ + // re-apply dsa + // transforms.OnePassDSA().applyTransform(program) + logSimplificationValidation // todo: only run iff conf.validateSimp + ), + notice = "Running Simplify", + // fixme: not an appropriate use of this field + postRunChecks = _ => Logger.info("[!] Simplify :: finished") +) From 8513cce7efcb3ec94db79843fce6b92b914e558d Mon Sep 17 00:00:00 2001 From: James Tobler <64625414+j-tobler@users.noreply.github.com> Date: Tue, 24 Jun 2025 15:19:36 +1000 Subject: [PATCH 14/30] scalafmt --- src/main/scala/ir/transforms/Simp.scala | 15 +++++++-------- 1 file changed, 7 insertions(+), 8 deletions(-) diff --git a/src/main/scala/ir/transforms/Simp.scala b/src/main/scala/ir/transforms/Simp.scala index 4d79b6be60..4ea37b22d8 100644 --- a/src/main/scala/ir/transforms/Simp.scala +++ b/src/main/scala/ir/transforms/Simp.scala @@ -2280,7 +2280,6 @@ val logSimplificationValidation = SingleTransform( } ) - val copyPropParamFixedPointTransform = SingleTransform( "CopyPropParamFixedPoint", (ctx, man) => { @@ -2327,14 +2326,14 @@ def getDoSimplifyTransform(validate: Boolean) = TransformBatch( DebugDumpIRLogger.writeToFile(File(s"${s}_il-before-simp.il"), pp_prog(program)) } } - */ + */ applyRpoTransform, // (this transform was already defined in this file) // example of printing a simple analysis removeEmptyBlocksTransform, coalesceBlocksOnce, removeEmptyBlocksTransform, // transforms.coalesceBlocksCrossBranchDependency(program) - /* + /* config.foreach { _.analysisDotPath.foreach { s => DebugDumpIRLogger.writeToFile(File(s"${s}_blockgraph-before-dsa.dot"), dotBlockGraph(program.mainProcedure)) @@ -2346,7 +2345,7 @@ def getDoSimplifyTransform(validate: Boolean) = TransformBatch( DebugDumpIRLogger.writeToFile(File(s"${s}_il-before-dsa.il"), pp_prog(program)) } } - */ + */ onePassDsaTransform, inlinePLTLaunchpad, removeEmptyBlocksTransform, @@ -2369,7 +2368,7 @@ def getDoSimplifyTransform(validate: Boolean) = TransformBatch( DebugDumpIRLogger.writeToFile(File(s"${s}_il-after-dsa.il"), pp_prog(program)) } } - */ + */ // todo: only run iff ir.eval.SimplifyValidation.validate (that is: iff conf.validateSimp) dsaCheck, // todo: @@ -2387,7 +2386,7 @@ def getDoSimplifyTransform(validate: Boolean) = TransformBatch( ) } } - */ + */ liftLinuxAssertFailTransform, // assert(program.procedures.forall(transforms.rdDSAProperty)) // todo: transforms should have the ability to log their performance as soon as they finish @@ -2400,7 +2399,7 @@ def getDoSimplifyTransform(validate: Boolean) = TransformBatch( DebugDumpIRLogger.writeToFile(File(s"${s}_il-after-copyprop.il"), pp_prog(program)) } } - */ + */ // val x = program.procedures.forall(transforms.rdDSAProperty) // assert(x) dsaCheckAfterTransform, // todo: only run iff conf.validateSimp @@ -2412,7 +2411,7 @@ def getDoSimplifyTransform(validate: Boolean) = TransformBatch( DebugDumpIRLogger.writeToFile(File(s"${s}_il-after-slices.il"), pp_prog(program)) } } - */ + */ // re-apply dsa // transforms.OnePassDSA().applyTransform(program) logSimplificationValidation // todo: only run iff conf.validateSimp From 09a036380650cbc15a23fe1d9aa3a3386859e55c Mon Sep 17 00:00:00 2001 From: James Tobler <64625414+j-tobler@users.noreply.github.com> Date: Thu, 26 Jun 2025 09:29:44 +1000 Subject: [PATCH 15/30] changed the way performance timing is done in transforms --- src/main/scala/ir/transforms/Simp.scala 
| 16 ++++++++++++---- src/main/scala/ir/transforms/Transform.scala | 13 ++++++++----- 2 files changed, 20 insertions(+), 9 deletions(-) diff --git a/src/main/scala/ir/transforms/Simp.scala b/src/main/scala/ir/transforms/Simp.scala index 4ea37b22d8..a6155a08a9 100644 --- a/src/main/scala/ir/transforms/Simp.scala +++ b/src/main/scala/ir/transforms/Simp.scala @@ -2179,8 +2179,15 @@ val makeProcEntriesNonLoops = SingleTransform( // --- DoSimplify ------------------------------------------------------------------------------------------------------ -/* For the doSimplify transform, I've put all the relevant sub-transforms here. We might want to move these in -future, or merge them with the functions they call. */ +/* +For the doSimplify transform, I've put all the relevant sub-transforms here. We might want to move these in +future, or merge them with the functions they call. + +To finish the implementation of this transform, we need to: +[ ] Figure out when to dump files (we currently have random ir dumps commented out everywhere). +[ ] Add a TransformConfig option for performance logging and have copyprop log its timer. +[ ] Remove the non-transforms like dsaCheck and replace with some other logic. +*/ val reduceLoops = SingleTransform( "ReduceLoops", @@ -2372,8 +2379,9 @@ def getDoSimplifyTransform(validate: Boolean) = TransformBatch( // todo: only run iff ir.eval.SimplifyValidation.validate (that is: iff conf.validateSimp) dsaCheck, // todo: - // if config is set and config.dumpILToPath is set then dump il - // if config is set and config.analysisDotPath is set then dump blockgraph + // - if config is set and config.dumpILToPath is set then dump il + // - if config is set and config.analysisDotPath is set then dump blockgraph + // - always log performance for this transform copyPropParamFixedPointTransform, fixupGuardsTransform, removeDuplicateGuardsTransform, diff --git a/src/main/scala/ir/transforms/Transform.scala b/src/main/scala/ir/transforms/Transform.scala index 1c9719a6fe..870cf10e68 100644 --- a/src/main/scala/ir/transforms/Transform.scala +++ b/src/main/scala/ir/transforms/Transform.scala @@ -32,7 +32,11 @@ case class IrLog(filenamePrefix: String) extends Log { * @param disabled Optionally specify a set of transforms to disable. * @param dumpLogs Optionally specify which logs to dump for which transforms, if any. 
*/ -case class TransformConfig(disabled: Set[Transform] = Set.empty, dumpLogs: Map[Transform, Set[Log]] = Map.empty) +case class TransformConfig( + disabled: Set[Transform] = Set.empty, + dumpLogs: Map[Transform, Set[Log]] = Map.empty, + logPerformance: Set[Transform] = Set.empty +) // default value for transforms val emptyConfig = TransformConfig() @@ -49,8 +53,6 @@ trait Transform { val name: String // optional message to log upon invocation of this transform val notice: String - // the performance of each transform is implicitly measured - val timer: PerformanceTimer = PerformanceTimer(name) // modifies the given IR context in-place, using the analysis results provided by this analysis manager protected def transform(ctx: IRContext, man: AnalysisManager, config: TransformConfig): man.Invalidation @@ -71,9 +73,10 @@ trait Transform { } val maybeLogs = config.dumpLogs.get(this) maybeLogs.foreach(_.foreach(_.dump(ctx, s"before-$name"))) - timer.checkPoint("start") + val logPerformance: Boolean = config.logPerformance.contains(this) + val timer: PerformanceTimer = if (logPerformance) then PerformanceTimer(name) else null val invalidation = transform(ctx, man, config) // run the actual transform and get the analysis results to clobber - timer.checkPoint("end") + if (logPerformance) then timer.checkPoint("delta") postRun(ctx) // runs after performance checks, and before logging maybeLogs.foreach(_.foreach(_.dump(ctx, s"after-$name"))) man.invalidate(invalidation) // clobber the specified analysis results From 891ee45da7f9ea68a63fa0c2f56bb3b3db76f084 Mon Sep 17 00:00:00 2001 From: James Tobler <64625414+j-tobler@users.noreply.github.com> Date: Thu, 26 Jun 2025 12:24:39 +1000 Subject: [PATCH 16/30] transform configurations are no longer passed to the apply method; streamlined the Transform and TransformBatch classes --- src/main/scala/ir/transforms/Transform.scala | 122 +++++++++---------- 1 file changed, 55 insertions(+), 67 deletions(-) diff --git a/src/main/scala/ir/transforms/Transform.scala b/src/main/scala/ir/transforms/Transform.scala index 870cf10e68..bc7eee219c 100644 --- a/src/main/scala/ir/transforms/Transform.scala +++ b/src/main/scala/ir/transforms/Transform.scala @@ -7,7 +7,7 @@ import analysis.AnalysisManager import translating.PrettyPrinter.pp_prog import java.io.File -// TransformConfig accepts Log instances which specify what kind of logs to dump for particular transforms +// transforms accept Log instances which specify what kind of logs to dump trait Log { def dump(ctx: IRContext, transformName: String): Unit } @@ -27,43 +27,48 @@ case class IrLog(filenamePrefix: String) extends Log { DebugDumpIRLogger.writeToFile(File(s"${filenamePrefix}_il-${transformName}.il"), pp_prog(ctx.program)) } -/** Allows the behaviour of transforms to be configured at runtime, upon invocation. - * - * @param disabled Optionally specify a set of transforms to disable. - * @param dumpLogs Optionally specify which logs to dump for which transforms, if any. +/** This case class defines the interface and configurable parameters for transform passes that modify the basil ir + * context in-place. It is designed to be read-only to avoid inter-dependencies between uses of the same transform + * instance. To configure the behaviour of a transform (e.g. 
based on arguments to the basil program), we supply + * a set of methods that each return a copy of the transform with a particular behaviour tweaked, such as which logs + * to dump before and after the transform runs, whether to log performance, or whether to disable the transform. */ -case class TransformConfig( - disabled: Set[Transform] = Set.empty, - dumpLogs: Map[Transform, Set[Log]] = Map.empty, - logPerformance: Set[Transform] = Set.empty -) - -// default value for transforms -val emptyConfig = TransformConfig() - -/** Currently, we have two kinds of transforms: SingleTransform, and TransformBatch. This trait provides a common - * interface for them to share. - * - * Transforms can be directly called to invoke the shared 'apply' method, which applies the transform in the context - * of some runtime configuration. They are designed to be read-only to avoid inter-dependencies between their users; if - * some configuration is required, it must be provided upon invocation. - */ -trait Transform { +case class Transform( // human-readable name of the transform; it is used in the names of generated log files - val name: String + name: String, + // the function to invoke when this transform is called + implementation: (ctx: IRContext, man: AnalysisManager) => man.Invalidation, // optional message to log upon invocation of this transform - val notice: String + notice: String = "", + // optional code to run after performance has been measured but before any logs are dumped; should not modify the ir + postRunChecks: IRContext => Unit = _ => () +) { + // set to false to make the apply method do nothing + val enabled: Boolean = true + // set to true to have the performance of this transform be measured and dumped with a PerformanceTimer + val logPerformance: Boolean = false + // a set of log types to dump before and after running the transform + val logsToDump: Set[Log] = Set.empty + + // the following methods return a copy of this transform with particular behaviours tweaked + + def when(cond: Boolean): Transform = copy(enabled = cond) + + def unless(cond: Boolean): Transform = copy(enabled = !cond) - // modifies the given IR context in-place, using the analysis results provided by this analysis manager - protected def transform(ctx: IRContext, man: AnalysisManager, config: TransformConfig): man.Invalidation + def timeIf(cond: Boolean): Transform = copy(logPerformance = cond) - // optional code to run *after* performance has been measured but *before* any logs are dumped, e.g. post-run checks - protected def postRun(ctx: IRContext): Unit + def withLogs(logs: Set[Log]): Transform = copy(logsToDump = logs) - // executes the transform with any modifications or book-keeping specified by the given config - def apply(ctx: IRContext, man: AnalysisManager, config: TransformConfig = emptyConfig): Unit = { - if (config.disabled.contains(this)) return - if (notice != "") then Logger.info(s"[!] ${notice}") + /** Applies this transform to the given ir context. This is effectively a wrapper for the `implementation` function + * that handles all of our configurable behaviour and the invalidation of analysis results. + * + * @param ctx The ir context to transform. + * @param man The analysis manager through which to access and invalidate analysis results. + */ + def apply(ctx: IRContext, man: AnalysisManager): Unit = { + if (!enabled) return + if (notice != "") Logger.info(s"[!] 
${notice}") if (man.program ne ctx.program) { // the program we are transforming should be the same one for which the analysis results were produced throw new RuntimeException( @@ -71,46 +76,29 @@ trait Transform { s"reference value than the program being transformed." ) } - val maybeLogs = config.dumpLogs.get(this) - maybeLogs.foreach(_.foreach(_.dump(ctx, s"before-$name"))) - val logPerformance: Boolean = config.logPerformance.contains(this) - val timer: PerformanceTimer = if (logPerformance) then PerformanceTimer(name) else null - val invalidation = transform(ctx, man, config) // run the actual transform and get the analysis results to clobber - if (logPerformance) then timer.checkPoint("delta") - postRun(ctx) // runs after performance checks, and before logging - maybeLogs.foreach(_.foreach(_.dump(ctx, s"after-$name"))) - man.invalidate(invalidation) // clobber the specified analysis results + logsToDump.foreach(_.dump(ctx, s"before-$name")) + val timer: PerformanceTimer = if logPerformance then PerformanceTimer(name) else null + val invalidation = implementation(ctx, man) + if logPerformance then timer.checkPoint("delta") + postRunChecks(ctx) + logsToDump.foreach(_.dump(ctx, s"after-$name")) + man.invalidate(invalidation) } } -/** A standard transform. Accepts an implementation function which modifies the given IR context in-place. - */ -case class SingleTransform( - name: String, - implementation: (ctx: IRContext, man: AnalysisManager) => man.Invalidation, - notice: String = "" -) extends Transform { - // simply calls the given implementation function - def transform(ctx: IRContext, man: AnalysisManager, config: TransformConfig): man.Invalidation = - implementation(ctx, man) - - // standard transforms don't need anything here; post-run checks should be handled by the implementation - def postRun(ctx: IRContext): Unit = () -} - -/** A transform batch is a sequence of other transforms, followed by some optional post-run checks on the IR context. 
- */ -case class TransformBatch( +// helper method for constructing transforms that are sequences of other transforms +// we prefer this over invoking transforms in the implementations of other transforms +def TransformBatch( name: String, transforms: List[Transform], notice: String = "", postRunChecks: IRContext => Unit = _ => () -) extends Transform { - // runs each sub-transform in-turn (invalidation is handled by the sub-transforms) - def transform(ctx: IRContext, man: AnalysisManager, config: TransformConfig): man.Invalidation = { - transforms.foreach(_(ctx, man, config)) +): Transform = Transform( + name, + (ctx, man) => { + transforms.foreach(_(ctx, man)) man.PreserveAll - } - - def postRun(ctx: IRContext): Unit = postRunChecks(ctx) -} + }, + notice, + postRunChecks +) From d54d29c53ea1c953a7b79633d920040a2059b7a0 Mon Sep 17 00:00:00 2001 From: James Tobler <64625414+j-tobler@users.noreply.github.com> Date: Thu, 26 Jun 2025 15:21:18 +1000 Subject: [PATCH 17/30] updated usages of transforms and fixed transform copying --- .../SummaryGenerator.scala | 5 +-- .../RelyGuaranteeGeneration.scala | 2 +- .../ir/transforms/InlinePLTLaunchpad.scala | 2 +- .../ir/transforms/PrepareForTranslation.scala | 21 +++++---- .../scala/ir/transforms/ReplaceReturn.scala | 2 +- src/main/scala/ir/transforms/Simp.scala | 38 ++++++++-------- .../StripUnreachableFunctions.scala | 2 +- src/main/scala/ir/transforms/Transform.scala | 44 +++++++++++++------ src/main/scala/util/RunUtils.scala | 32 ++++---------- 9 files changed, 76 insertions(+), 72 deletions(-) diff --git a/src/main/scala/analysis/procedure_summaries/SummaryGenerator.scala b/src/main/scala/analysis/procedure_summaries/SummaryGenerator.scala index 321be166bc..a5275771fe 100644 --- a/src/main/scala/analysis/procedure_summaries/SummaryGenerator.scala +++ b/src/main/scala/analysis/procedure_summaries/SummaryGenerator.scala @@ -13,8 +13,7 @@ import ir.transforms.{ SCCCallgraphWorklistSolver, reversePostOrder, worklistSolver, - Transform, - SingleTransform + Transform } case class Condition(pred: Predicate, label: Option[String] = None) @@ -223,7 +222,7 @@ class SummaryGenerator(program: Program, parameterForm: Boolean = false) { } def getGenerateProcedureSummariesTransform(simplified: Boolean): Transform = - SingleTransform( + Transform( "GenerateProcedureSummaries", (ctx, man) => { val prog = ctx.program diff --git a/src/main/scala/analysis/rely_guarantee_generation/RelyGuaranteeGeneration.scala b/src/main/scala/analysis/rely_guarantee_generation/RelyGuaranteeGeneration.scala index 14a934195e..d2315f4366 100644 --- a/src/main/scala/analysis/rely_guarantee_generation/RelyGuaranteeGeneration.scala +++ b/src/main/scala/analysis/rely_guarantee_generation/RelyGuaranteeGeneration.scala @@ -152,7 +152,7 @@ class GuarGenSummaryGenerator[T, S](dom: InterferenceProductDomain[T, S]) } def getGenerateRgConditionsTransform(threads: List[Procedure]): Transform = - SingleTransform( + Transform( "GenerateRgConditions", (ctx, man) => { type StateLatticeElement = LatticeMap[Variable, analysis.Interval] diff --git a/src/main/scala/ir/transforms/InlinePLTLaunchpad.scala b/src/main/scala/ir/transforms/InlinePLTLaunchpad.scala index 41815a1605..cfb5b3039e 100644 --- a/src/main/scala/ir/transforms/InlinePLTLaunchpad.scala +++ b/src/main/scala/ir/transforms/InlinePLTLaunchpad.scala @@ -2,7 +2,7 @@ package ir.transforms import ir.{Program, Procedure} import ir.cilvisitor.visit_prog -val inlinePLTLaunchpad = SingleTransform( +val inlinePLTLaunchpad = Transform( 
"InlinePLTLaunchpad", (ctx, man) => { ctx.program.sortProceduresRPO() diff --git a/src/main/scala/ir/transforms/PrepareForTranslation.scala b/src/main/scala/ir/transforms/PrepareForTranslation.scala index 21308be8fa..2cbf085a50 100644 --- a/src/main/scala/ir/transforms/PrepareForTranslation.scala +++ b/src/main/scala/ir/transforms/PrepareForTranslation.scala @@ -4,11 +4,11 @@ import scala.collection.mutable import ir.Block import ir.cilvisitor.* import ir.invariant -import util.BASILConfig +import util.{BASILConfig, MemoryRegionsMode} import analysis.AnalysisManager // run iff arg.isEmpty || (arg.get.memoryRegions == MemoryRegionsMode.Disabled) -val determineRelevantMemory = SingleTransform( +val determineRelevantMemory = Transform( "DetermineRelevantMemory", (ctx, man) => { ctx.program.determineRelevantMemory(ctx.globalOffsets) @@ -17,7 +17,7 @@ val determineRelevantMemory = SingleTransform( ) // run iff arg -val stackSubstitution = SingleTransform( +val stackSubstitution = Transform( "StackSubstitution", (ctx, man) => { visit_prog(StackSubstituter(), ctx.program) @@ -25,7 +25,7 @@ val stackSubstitution = SingleTransform( } ) -val setModifies = SingleTransform( +val setModifies = Transform( "SetModifies", (ctx, man) => { val specModifies = ctx.specification.subroutines.map(s => s.name -> s.modifies).toMap @@ -35,7 +35,7 @@ val setModifies = SingleTransform( ) def getRenameBoogieKeywordsTransform(boogieReserved: Set[String]): Transform = - SingleTransform( + Transform( "RenameBoogieKeywords", (ctx, man) => { visit_prog(BoogieReservedRenamer(boogieReserved), ctx.program) @@ -46,12 +46,15 @@ def getRenameBoogieKeywordsTransform(boogieReserved: Set[String]): Transform = /** Cull unneccessary information that does not need to be included in the translation, and infer stack regions, and * add in modifies from the spec. 
*/ -def getPrepareForTranslationTransform(trimDepth: Int, boogieReserved: Set[String]): Transform = TransformBatch( +def getPrepareForTranslationTransform(config: BASILConfig, boogieReserved: Set[String]): Transform = TransformBatch( "PrepareForTranslation", List( - determineRelevantMemory, - getStripUnreachableFunctionsTransform(trimDepth), - stackSubstitution, + determineRelevantMemory.when( + config.staticAnalysis.isEmpty || (config.staticAnalysis.get.memoryRegions == MemoryRegionsMode.Disabled)), + getStripUnreachableFunctionsTransform(config.loading.procedureTrimDepth), + stackSubstitution.when( + !config.memoryTransform && + (config.staticAnalysis.isEmpty || (config.staticAnalysis.get.memoryRegions == MemoryRegionsMode.Disabled))), setModifies, getRenameBoogieKeywordsTransform(boogieReserved: Set[String]) ), diff --git a/src/main/scala/ir/transforms/ReplaceReturn.scala b/src/main/scala/ir/transforms/ReplaceReturn.scala index 2465eaca2e..e4bbbe8f52 100644 --- a/src/main/scala/ir/transforms/ReplaceReturn.scala +++ b/src/main/scala/ir/transforms/ReplaceReturn.scala @@ -143,7 +143,7 @@ def establishProcedureDiamondForm(program: Program, doSimplify: Boolean = false) } def getEstablishProcedureDiamondFormTransform(doSimplify: Boolean): Transform = - SingleTransform( + Transform( "EstablishProcedureDiamondForm", (ctx, man) => { establishProcedureDiamondForm(ctx.program, doSimplify) diff --git a/src/main/scala/ir/transforms/Simp.scala b/src/main/scala/ir/transforms/Simp.scala index a6155a08a9..d9fae95dba 100644 --- a/src/main/scala/ir/transforms/Simp.scala +++ b/src/main/scala/ir/transforms/Simp.scala @@ -830,7 +830,7 @@ def coalesceBlocks(p: Program): Boolean = { didAny } -val coalesceBlocksOnce = SingleTransform( +val coalesceBlocksOnce = Transform( "CoalesceBlocksOnce", (ctx, man) => { coalesceBlocks(ctx.program) @@ -838,7 +838,7 @@ val coalesceBlocksOnce = SingleTransform( } ) -val coalesceBlocksFixpoint = SingleTransform( +val coalesceBlocksFixpoint = Transform( "CoalesceBlocksFixpoint", (ctx, man) => { while (coalesceBlocks(ctx.program)) {} @@ -1156,7 +1156,7 @@ def applyRPO(p: Program) = { } } -val applyRpoTransform = SingleTransform( +val applyRpoTransform = Transform( "ApplyRPO", (ctx, man) => { applyRPO(man.program) @@ -1897,7 +1897,7 @@ def findDefinitelyExits(p: Program): ProcReturnInfo = { ) } -val replaceJumpsInNonReturningProcs = SingleTransform( +val replaceJumpsInNonReturningProcs = Transform( "ReplaceJumpsInNonReturningProcs", (ctx, man) => { val nonReturning = findDefinitelyExits(ctx.program) @@ -1909,7 +1909,7 @@ val replaceJumpsInNonReturningProcs = SingleTransform( } ) -val removeExternalFunctionReferences = SingleTransform( +val removeExternalFunctionReferences = Transform( "RemoveExternalFunctionReferences", (ctx, man) => { val externalNames = ctx.externalFunctions.map(_.name) @@ -2159,7 +2159,7 @@ def removeTriviallyDeadBranches(p: Program, removeAllUnreachableBlocks: Boolean dead.nonEmpty } -val makeProcEntriesNonLoops = SingleTransform( +val makeProcEntriesNonLoops = Transform( "MakeProcEntriesNonLoops", (ctx, man) => { ctx.program.procedures.foreach(p => { @@ -2189,7 +2189,7 @@ To finish the implementation of this transform, we need to: [ ] Remove the non-transforms like dsaCheck and replace with some other logic. 
*/ -val reduceLoops = SingleTransform( +val reduceLoops = Transform( "ReduceLoops", (ctx, man) => { val foundLoops = LoopDetector.identify_loops(ctx.program) @@ -2199,7 +2199,7 @@ val reduceLoops = SingleTransform( } ) -val normaliseBlockNamesTransform = SingleTransform( +val normaliseBlockNamesTransform = Transform( "NormaliseBlockNames", (ctx, man) => { for (p <- ctx.program.procedures) { @@ -2209,7 +2209,7 @@ val normaliseBlockNamesTransform = SingleTransform( } ) -val sortProceduresRpoTransform = SingleTransform( +val sortProceduresRpoTransform = Transform( "NormaliseBlockNames", (ctx, man) => { ctx.program.sortProceduresRPO() @@ -2217,7 +2217,7 @@ val sortProceduresRpoTransform = SingleTransform( } ) -val liftSvCompTransform = SingleTransform( +val liftSvCompTransform = Transform( "LiftSvComp", (ctx, man) => { liftSVComp(ctx.program) @@ -2225,7 +2225,7 @@ val liftSvCompTransform = SingleTransform( } ) -val removeEmptyBlocksTransform = SingleTransform( +val removeEmptyBlocksTransform = Transform( "RemoveEmptyBlocks", (ctx, man) => { removeEmptyBlocks(ctx.program) @@ -2233,7 +2233,7 @@ val removeEmptyBlocksTransform = SingleTransform( } ) -val onePassDsaTransform = SingleTransform( +val onePassDsaTransform = Transform( "OnePassDsa", (ctx, man) => { OnePassDSA().applyTransform(ctx.program) @@ -2242,7 +2242,7 @@ val onePassDsaTransform = SingleTransform( ) // fixme: this is not really a transform, but a check on the ir -val dsaCheck = SingleTransform( +val dsaCheck = Transform( "DsaCheck", (ctx, man) => { Logger.info("DSA no uninitialised") @@ -2263,7 +2263,7 @@ val dsaCheck = SingleTransform( ) // fixme: similar issues to the above transform -val dsaCheckAfterTransform = SingleTransform( +val dsaCheckAfterTransform = Transform( "DsaCheckAfterTransform", (ctx, man) => { Logger.info("DSA Check (after transform)") @@ -2276,7 +2276,7 @@ val dsaCheckAfterTransform = SingleTransform( // fixme: similar issues to the above // we might want to move this out of the transform, to the callsite -val logSimplificationValidation = SingleTransform( +val logSimplificationValidation = Transform( "LogSimplificationValidation", (ctx, man) => { Logger.info("[!] 
Simplify :: Writing simplification validation") @@ -2287,7 +2287,7 @@ val logSimplificationValidation = SingleTransform( } ) -val copyPropParamFixedPointTransform = SingleTransform( +val copyPropParamFixedPointTransform = Transform( "CopyPropParamFixedPoint", (ctx, man) => { copyPropParamFixedPoint(ctx.program, ctx.globalOffsets) @@ -2296,7 +2296,7 @@ val copyPropParamFixedPointTransform = SingleTransform( notice = "Copyprop Start" ) -val fixupGuardsTransform = SingleTransform( +val fixupGuardsTransform = Transform( "FixUpGuards", (ctx, man) => { fixupGuards(ctx.program) @@ -2304,7 +2304,7 @@ val fixupGuardsTransform = SingleTransform( } ) -val removeDuplicateGuardsTransform = SingleTransform( +val removeDuplicateGuardsTransform = Transform( "RemoveDuplicateGuards", (ctx, man) => { removeDuplicateGuard(ctx.program) @@ -2312,7 +2312,7 @@ val removeDuplicateGuardsTransform = SingleTransform( } ) -val liftLinuxAssertFailTransform = SingleTransform( +val liftLinuxAssertFailTransform = Transform( "LiftLinuxAssertFail", (ctx, man) => { liftLinuxAssertFail(ctx) diff --git a/src/main/scala/ir/transforms/StripUnreachableFunctions.scala b/src/main/scala/ir/transforms/StripUnreachableFunctions.scala index 968f426cb3..88f30ad400 100644 --- a/src/main/scala/ir/transforms/StripUnreachableFunctions.scala +++ b/src/main/scala/ir/transforms/StripUnreachableFunctions.scala @@ -53,7 +53,7 @@ def stripUnreachableFunctions(p: Program, depth: Int): Unit = { } def getStripUnreachableFunctionsTransform(depth: Int): Transform = - SingleTransform( + Transform( "StripUnreachableFunctions", (ctx, man) => { val before = ctx.program.procedures.size diff --git a/src/main/scala/ir/transforms/Transform.scala b/src/main/scala/ir/transforms/Transform.scala index bc7eee219c..4444bb59c3 100644 --- a/src/main/scala/ir/transforms/Transform.scala +++ b/src/main/scala/ir/transforms/Transform.scala @@ -32,33 +32,51 @@ case class IrLog(filenamePrefix: String) extends Log { * instance. To configure the behaviour of a transform (e.g. based on arguments to the basil program), we supply * a set of methods that each return a copy of the transform with a particular behaviour tweaked, such as which logs * to dump before and after the transform runs, whether to log performance, or whether to disable the transform. + * + * @param name Human-readable name of the transform; it is used in the names of generated log files. + * @param implementation The function to invoke when this transform is called. + * @param notice Optional message to log upon invocation of this transform (for important transforms). + * @param postRunChecks Optional code to run after performance has been measured but before any logs are dumped. */ case class Transform( - // human-readable name of the transform; it is used in the names of generated log files name: String, - // the function to invoke when this transform is called implementation: (ctx: IRContext, man: AnalysisManager) => man.Invalidation, - // optional message to log upon invocation of this transform notice: String = "", - // optional code to run after performance has been measured but before any logs are dumped; should not modify the ir postRunChecks: IRContext => Unit = _ => () ) { + /* The following fields are configurable via the below methods, which return copies of the transform. + Every transform can be assumed to have these defaults unless it was created via one of these methods. 
*/ + // set to false to make the apply method do nothing - val enabled: Boolean = true + private var enabled: Boolean = true // set to true to have the performance of this transform be measured and dumped with a PerformanceTimer - val logPerformance: Boolean = false + private var logPerformance: Boolean = false // a set of log types to dump before and after running the transform - val logsToDump: Set[Log] = Set.empty - - // the following methods return a copy of this transform with particular behaviours tweaked + private var logsToDump: Set[Log] = Set.empty - def when(cond: Boolean): Transform = copy(enabled = cond) + def when(cond: Boolean): Transform = { + val cp = copy() + cp.enabled = cond + return cp + } - def unless(cond: Boolean): Transform = copy(enabled = !cond) + def unless(cond: Boolean): Transform = { + val cp = copy() + cp.enabled = !cond + return cp + } - def timeIf(cond: Boolean): Transform = copy(logPerformance = cond) + def timeIf(cond: Boolean): Transform = { + val cp = copy() + cp.logPerformance = cond + return cp + } - def withLogs(logs: Set[Log]): Transform = copy(logsToDump = logs) + def withLogs(logs: Set[Log]): Transform = { + val cp = copy() + cp.logsToDump = logs + return cp + } /** Applies this transform to the given ir context. This is effectively a wrapper for the `implementation` function * that handles all of our configurable behaviour and the invalidation of analysis results. diff --git a/src/main/scala/util/RunUtils.scala b/src/main/scala/util/RunUtils.scala index 7ee7b96f9f..b4382fe875 100644 --- a/src/main/scala/util/RunUtils.scala +++ b/src/main/scala/util/RunUtils.scala @@ -726,39 +726,23 @@ object RunUtils { assert(invariant.cfgCorrect(ctx.program)) assert(invariant.blocksUniqueToEachProcedure(ctx.program)) - // derive transform configuration from basil config val analysisManager = AnalysisManager(ctx.program) - val disabledTransforms: mutable.Set[Transform] = mutable.Set.empty - val transformsToLog: mutable.Map[Transform, Set[Log]] = mutable.Map.empty - - if (!q.staticAnalysis.isEmpty && q.staticAnalysis.get.memoryRegions != MemoryRegionsMode.Disabled) { - disabledTransforms += determineRelevantMemory - } - if ( - q.memoryTransform || (!q.staticAnalysis.isEmpty && q.staticAnalysis.get.memoryRegions != MemoryRegionsMode.Disabled) - ) { - disabledTransforms += stackSubstitution - } - - // note: we might not necessarily want the same config for all transforms - val transformConfig = TransformConfig(disabled = disabledTransforms.toSet, dumpLogs = transformsToLog.toMap) - // these transforms depend on basil config parameters and thus need to be constructed here - val prepareForTranslation = getPrepareForTranslationTransform(q.loading.procedureTrimDepth, Set("free")) - val genProcSummaries = getGenerateProcedureSummariesTransform(q.loading.parameterForm || conf.simplify) + val prepareForTranslation = getPrepareForTranslationTransform(q, Set("free")) + val genProcSummaries = getGenerateProcedureSummariesTransform(q.loading.parameterForm || q.simplify) val genRgConditions = getGenerateRgConditionsTransform(ctx.program.procedures.toList.filter(_.returnBlock != None)) val stripUnreachableFunctions = getStripUnreachableFunctionsTransform(q.loading.procedureTrimDepth) - if conf.simplify then doCleanupWithSimplify(ctx, analysisManager, transformConfig) - else doCleanupWithoutSimplify(ctx, analysisManager, transformConfig) + if conf.simplify then doCleanupWithSimplify(ctx, analysisManager) + else doCleanupWithoutSimplify(ctx, analysisManager) 
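// Usage sketch, assuming the Transform API introduced above: the copy-returning builders
// (`when`, `timeIf`, `withLogs`) configure a transform before it is applied directly to the
// IR context and analysis manager, replacing the old TransformConfig plumbing. For example:
//
//   stripUnreachableFunctions.when(q.loading.trimEarly)(ctx, analysisManager)
//   coalesceBlocksOnce.timeIf(true)(ctx, analysisManager)                      // record a PerformanceTimer measurement
//   coalesceBlocksOnce.withLogs(Set(IrLog("coalesce")))(ctx, analysisManager)  // dump IR logs before and after the run
//
// `stripUnreachableFunctions`, `coalesceBlocksOnce`, `IrLog` and the apply signature all appear
// elsewhere in this patch series; the "coalesce" log prefix is only a hypothetical placeholder.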
assert(ir.invariant.programDiamondForm(ctx.program)) - transforms.inlinePLTLaunchpad(ctx, analysisManager, transformConfig) + transforms.inlinePLTLaunchpad(ctx, analysisManager) assert(ir.invariant.programDiamondForm(ctx.program)) - if (q.loading.trimEarly) then stripUnreachableFunctions(ctx, analysisManager, transformConfig) + if q.loading.trimEarly then stripUnreachableFunctions(ctx, analysisManager) // todo: since refactoring, there is some extra code that is run here // see StripUnreachableFunctions.getStripUnreachableFunctionsTransform @@ -865,9 +849,9 @@ object RunUtils { } } - prepareForTranslation(ctx, analysisManager, transformConfig) + prepareForTranslation(ctx, analysisManager) - if (conf.generateRelyGuarantees) then genRgConditions(ctx, analysisManager) + if conf.generateRelyGuarantees then genRgConditions(ctx, analysisManager) q.loading.dumpIL.foreach(s => { val timer = PerformanceTimer("Dump IL") From 2b06f1b2fe95c0a47df6b8d350c875a9894b3f03 Mon Sep 17 00:00:00 2001 From: James Tobler <64625414+j-tobler@users.noreply.github.com> Date: Thu, 26 Jun 2025 15:22:04 +1000 Subject: [PATCH 18/30] scalafmt --- src/main/scala/ir/transforms/PrepareForTranslation.scala | 6 ++++-- src/main/scala/ir/transforms/Simp.scala | 2 +- 2 files changed, 5 insertions(+), 3 deletions(-) diff --git a/src/main/scala/ir/transforms/PrepareForTranslation.scala b/src/main/scala/ir/transforms/PrepareForTranslation.scala index 2cbf085a50..7af7eba73f 100644 --- a/src/main/scala/ir/transforms/PrepareForTranslation.scala +++ b/src/main/scala/ir/transforms/PrepareForTranslation.scala @@ -50,11 +50,13 @@ def getPrepareForTranslationTransform(config: BASILConfig, boogieReserved: Set[S "PrepareForTranslation", List( determineRelevantMemory.when( - config.staticAnalysis.isEmpty || (config.staticAnalysis.get.memoryRegions == MemoryRegionsMode.Disabled)), + config.staticAnalysis.isEmpty || (config.staticAnalysis.get.memoryRegions == MemoryRegionsMode.Disabled) + ), getStripUnreachableFunctionsTransform(config.loading.procedureTrimDepth), stackSubstitution.when( !config.memoryTransform && - (config.staticAnalysis.isEmpty || (config.staticAnalysis.get.memoryRegions == MemoryRegionsMode.Disabled))), + (config.staticAnalysis.isEmpty || (config.staticAnalysis.get.memoryRegions == MemoryRegionsMode.Disabled)) + ), setModifies, getRenameBoogieKeywordsTransform(boogieReserved: Set[String]) ), diff --git a/src/main/scala/ir/transforms/Simp.scala b/src/main/scala/ir/transforms/Simp.scala index d9fae95dba..ae0b64b640 100644 --- a/src/main/scala/ir/transforms/Simp.scala +++ b/src/main/scala/ir/transforms/Simp.scala @@ -2187,7 +2187,7 @@ To finish the implementation of this transform, we need to: [ ] Figure out when to dump files (we currently have random ir dumps commented out everywhere). [ ] Add a TransformConfig option for performance logging and have copyprop log its timer. [ ] Remove the non-transforms like dsaCheck and replace with some other logic. 
-*/ + */ val reduceLoops = Transform( "ReduceLoops", From 0bed8a1f8090708dfe3deccbf83c84c1309cc4b3 Mon Sep 17 00:00:00 2001 From: James Tobler <64625414+j-tobler@users.noreply.github.com> Date: Thu, 26 Jun 2025 15:58:25 +1000 Subject: [PATCH 19/30] minor cleanup --- src/main/scala/ir/transforms/Simp.scala | 10 +--------- 1 file changed, 1 insertion(+), 9 deletions(-) diff --git a/src/main/scala/ir/transforms/Simp.scala b/src/main/scala/ir/transforms/Simp.scala index 4093fe53d6..a6e7d79602 100644 --- a/src/main/scala/ir/transforms/Simp.scala +++ b/src/main/scala/ir/transforms/Simp.scala @@ -2175,15 +2175,7 @@ val makeProcEntriesNonLoops = Transform( // --- DoSimplify ------------------------------------------------------------------------------------------------------ -/* -For the doSimplify transform, I've put all the relevant sub-transforms here. We might want to move these in -future, or merge them with the functions they call. - -To finish the implementation of this transform, we need to: -[ ] Figure out when to dump files (we currently have random ir dumps commented out everywhere). -[ ] Add a TransformConfig option for performance logging and have copyprop log its timer. -[ ] Remove the non-transforms like dsaCheck and replace with some other logic. - */ +// the following code is a work in progress val reduceLoops = Transform( "ReduceLoops", From 9c5c74d4365e4194da3606d9afacd695485a204b Mon Sep 17 00:00:00 2001 From: Alistair Michael Date: Thu, 3 Jul 2025 17:09:48 +1000 Subject: [PATCH 20/30] move old analysis to separate file --- .../scala/analysis/AnalysePipelineMRA.scala | 318 ++++++++++++++++++ src/main/scala/util/RunUtils.scala | 301 +---------------- src/test/scala/PointsToTest.scala | 4 +- 3 files changed, 321 insertions(+), 302 deletions(-) create mode 100644 src/main/scala/analysis/AnalysePipelineMRA.scala diff --git a/src/main/scala/analysis/AnalysePipelineMRA.scala b/src/main/scala/analysis/AnalysePipelineMRA.scala new file mode 100644 index 0000000000..615ba1290d --- /dev/null +++ b/src/main/scala/analysis/AnalysePipelineMRA.scala @@ -0,0 +1,318 @@ +package analysis + +import analysis.{Interval as _, *} +import boogie.* +import ir.* +import specification.* +import util.{ + AnalysisResultDotLogger, + DebugDumpIRLogger, + IRContext, + Logger, + StaticAnalysisConfig, + StaticAnalysisContext, + StaticAnalysisLogger, + writeToFile +} + +import java.io.File +import scala.collection.mutable + +/** Methods relating to program static analysis. + */ +object AnalysisPipelineMRA { + + /** Run all static analysis passes on the provided IRProgram. 
+ */ + def analyse( + ictx: IRContext, + config: StaticAnalysisConfig, + iteration: Int, + previousResults: Option[StaticAnalysisContext] = None + ): StaticAnalysisContext = { + var ctx = ictx + val IRProgram: Program = ctx.program + val externalFunctions: Set[ExternalFunction] = ctx.externalFunctions + val globals: Set[SpecGlobal] = ctx.globals + val globalOffsets: Map[BigInt, BigInt] = ctx.globalOffsets + + assert(invariant.singleCallBlockEnd(ctx.program)) + assert(invariant.cfgCorrect(ctx.program)) + assert(invariant.blocksUniqueToEachProcedure(ctx.program)) + assert(invariant.correctCalls(ctx.program)) + + val subroutines = IRProgram.procedures + .filter(p => p.address.isDefined) + .map(p => p.address.get -> p.name) + .toMap + val globalAddresses = globals.map(s => s.address -> s.name).toMap + val globalSizes = globals.map(s => s.name -> s.size).toMap + val externalAddresses = externalFunctions.map(e => e.offset -> e.name).toMap + StaticAnalysisLogger.debug("Globals:") + StaticAnalysisLogger.debug(globalAddresses) + StaticAnalysisLogger.debug("Global Offsets: ") + StaticAnalysisLogger.debug(globalOffsets) + StaticAnalysisLogger.debug("Global Sizes: ") + StaticAnalysisLogger.debug(globalSizes) + StaticAnalysisLogger.debug("External: ") + StaticAnalysisLogger.debug(externalAddresses) + StaticAnalysisLogger.debug("Subroutine Addresses:") + StaticAnalysisLogger.debug(subroutines) + + StaticAnalysisLogger.debug("reducible loops") + // reducible loops + if (config.irreducibleLoops) { + val foundLoops = LoopDetector.identify_loops(IRProgram) + foundLoops.irreducibleLoops.foreach(l => StaticAnalysisLogger.debug(s"Irreducible loop found: ${l.name}")) + + val newLoops = foundLoops.reducibleTransformIR().identifiedLoops + newLoops.foreach(l => StaticAnalysisLogger.debug(s"Loop found: ${l.name}")) + + foundLoops.updateIrWithLoops() + + config.analysisDotPath.foreach { s => + AnalysisResultDotLogger.writeToFile( + File(s"${s}_graph-after-loop-reduce-$iteration.dot"), + dotBlockGraph(IRProgram, IRProgram.map(b => b -> b.toString).toMap) + ) + AnalysisResultDotLogger.writeToFile( + File(s"${s}_blockgraph-after-loop-reduce-$iteration.dot"), + dotBlockGraph(IRProgram, IRProgram.filter(_.isInstanceOf[Block]).map(b => b -> b.toString).toMap) + ) + } + } + + val mergedSubroutines = subroutines ++ externalAddresses + + val domain = computeDomain(IntraProcIRCursor, IRProgram.procedures) + val interDomain = computeDomain(InterProcIRCursor, IRProgram.procedures) + + StaticAnalysisLogger.debug("[!] Running ANR") + val ANRSolver = ANRAnalysisSolver(IRProgram) + val ANRResult = ANRSolver.analyze() + + StaticAnalysisLogger.debug("[!] Running RNA") + val RNASolver = RNAAnalysisSolver(IRProgram) + val RNAResult = RNASolver.analyze() + + StaticAnalysisLogger.debug("[!] 
Running Inter-procedural Constant Propagation") + val interProcConstProp = InterProcConstantPropagation(IRProgram) + val interProcConstPropResult: Map[CFGPosition, Map[Variable, FlatElement[BitVecLiteral]]] = + interProcConstProp.analyze() + + config.analysisResultsPath.foreach { s => + DebugDumpIRLogger.writeToFile( + File(s"${s}OGconstprop$iteration.txt"), + printAnalysisResults(IRProgram, interProcConstPropResult) + ) + } + + val intraProcConstProp = IntraProcConstantPropagation(IRProgram) + val intraProcConstPropResult: Map[CFGPosition, Map[Variable, FlatElement[BitVecLiteral]]] = + intraProcConstProp.analyze() + + config.analysisResultsPath.foreach { s => + DebugDumpIRLogger.writeToFile( + File(s"${s}_new_ir_constprop$iteration.txt"), + printAnalysisResults(IRProgram, intraProcConstPropResult) + ) + } + + config.analysisDotPath.foreach { f => + val dumpdomain = computeDomain[CFGPosition, CFGPosition](InterProcIRCursor, IRProgram.procedures) + AnalysisResultDotLogger.writeToFile( + File(s"${f}_new_ir_intercfg$iteration.dot"), + toDot(dumpdomain.toSet, InterProcIRCursor, Map.empty, Set()) + ) + } + + val reachingDefinitionsAnalysisSolver = InterprocReachingDefinitionsAnalysisSolver(IRProgram) + val reachingDefinitionsAnalysisResults = reachingDefinitionsAnalysisSolver.analyze() + + config.analysisDotPath.foreach { s => + AnalysisResultDotLogger.writeToFile( + File(s"${s}_reachingDefinitions$iteration.dot"), + toDot( + IRProgram, + IRProgram.filter(_.isInstanceOf[Command]).map(b => b -> reachingDefinitionsAnalysisResults(b).toString).toMap, + true + ) + ) + } + + Logger.debug("[!] Running Writes To") + val writesTo = WriteToAnalysis(ctx.program).analyze() + + Logger.debug("[!] Running commondef variable renaming (Intra SSA)") + val SSAResults = getCommonDefinitionVariableRenaming(IRProgram, writesTo) + + config.analysisDotPath.foreach(s => { + writeToFile( + toDot(IRProgram, IRProgram.filter(_.isInstanceOf[Command]).map(b => b -> SSAResults(b).toString).toMap, true), + s"${s}_SSA$iteration.dot" + ) + }) + + val mmm = MemoryModelMap(globalOffsets, mergedSubroutines, globalAddresses, globalSizes) + mmm.preLoadGlobals() + + val previousVSAResults = if (previousResults.isDefined) { + previousResults.get.vsaResult + } else { + Map[CFGPosition, LiftedElement[Map[Variable | MemoryRegion, Set[Value]]]]() + } + + StaticAnalysisLogger.debug("[!] Running GRA") + val graSolver = GlobalRegionAnalysisSolver( + IRProgram, + domain.toSet, + interProcConstPropResult, + reachingDefinitionsAnalysisResults, + mmm, + previousVSAResults + ) + val graResult = graSolver.analyze() + + StaticAnalysisLogger.debug("[!] 
Running MRA") + val mraSolver = MemoryRegionAnalysisSolver( + IRProgram, + domain.toSet, + interProcConstPropResult, + reachingDefinitionsAnalysisResults, + graResult, + mmm, + previousVSAResults + ) + val mraResult = mraSolver.analyze() + + config.analysisDotPath.foreach { s => + AnalysisResultDotLogger.writeToFile(File(s"${s}_callgraph$iteration.dot"), dotCallGraph(IRProgram)) + AnalysisResultDotLogger.writeToFile( + File(s"${s}_blockgraph$iteration.dot"), + dotBlockGraph(IRProgram, IRProgram.filter(_.isInstanceOf[Block]).map(b => b -> b.toString).toMap) + ) + + AnalysisResultDotLogger.writeToFile( + File(s"${s}_new_ir_constprop$iteration.dot"), + toDot( + IRProgram, + IRProgram.filter(_.isInstanceOf[Command]).map(b => b -> intraProcConstPropResult(b).toString).toMap + ) + ) + + writeToFile( + toDot(IRProgram, IRProgram.filter(_.isInstanceOf[Command]).map(b => b -> ANRResult(b).toString).toMap), + s"${s}_ANR$iteration.dot" + ) + + writeToFile( + toDot(IRProgram, IRProgram.filter(_.isInstanceOf[Command]).map(b => b -> RNAResult(b).toString).toMap), + s"${s}_RNA$iteration.dot" + ) + + writeToFile( + toDot(IRProgram, IRProgram.filter(_.isInstanceOf[Command]).map(b => b -> mraResult(b).toString).toMap), + s"${s}_MRA$iteration.dot" + ) + + AnalysisResultDotLogger.writeToFile( + File(s"${s}_GRA$iteration.dot"), + toDot(IRProgram, IRProgram.filter(_.isInstanceOf[Command]).map(b => b -> graResult(b).toString).toMap) + ) + } + + StaticAnalysisLogger.debug("[!] Running MMM") + mmm.convertMemoryRegions( + mraSolver.procedureToStackRegions, + mraSolver.procedureToHeapRegions, + mraResult, + mraSolver.procedureToSharedRegions, + graSolver.getDataMap, + graResult + ) + mmm.logRegions() + + Logger.debug("[!] Running VSA") + val vsaSolver = ValueSetAnalysisSolver(IRProgram, mmm) + val vsaResult: Map[CFGPosition, LiftedElement[Map[Variable | MemoryRegion, Set[Value]]]] = vsaSolver.analyze() + + mmm.postLoadVSARelations(vsaResult, ANRResult, RNAResult) + + config.analysisDotPath.foreach { s => + AnalysisResultDotLogger.writeToFile( + File(s"${s}_VSA$iteration.dot"), + toDot(IRProgram, IRProgram.filter(_.isInstanceOf[Command]).map(b => b -> vsaResult(b).toString).toMap) + ) + } + + Logger.debug("[!] 
Running Steensgaard") + val steensgaardSolver = InterprocSteensgaardAnalysis(interDomain.toSet, mmm, SSAResults) + steensgaardSolver.analyze() + val steensgaardResults = steensgaardSolver.pointsTo() + + mmm.setCallSiteSummaries(steensgaardSolver.callSiteSummary) + + val paramResults: Map[Procedure, Set[Variable]] = ParamAnalysis(IRProgram).analyze() + val interLiveVarsResults: Map[CFGPosition, Map[Variable, TwoElement]] = InterLiveVarsAnalysis(IRProgram).analyze() + + StaticAnalysisContext( + intraProcConstProp = intraProcConstPropResult, + interProcConstProp = interProcConstPropResult, + memoryRegionResult = mraResult, + vsaResult = vsaResult, + interLiveVarsResults = interLiveVarsResults, + paramResults = paramResults, + steensgaardResults = steensgaardResults, + mmmResults = mmm, + symbolicAddresses = Map.empty, + reachingDefs = reachingDefinitionsAnalysisResults, + regionInjector = None, + localDSA = Map.empty, + bottomUpDSA = Map.empty, + topDownDSA = Map.empty, + writesToResult = writesTo, + ssaResults = SSAResults + ) + } + + def printAnalysisResults(prog: Program, result: Map[CFGPosition, _]): String = { + val results = mutable.ArrayBuffer[String]() + val toVisit = mutable.Stack[CFGPosition]() + val visited = mutable.HashSet[CFGPosition]() + toVisit.pushAll(prog.procedures) + + while (toVisit.nonEmpty) { + val next = toVisit.pop() + visited.add(next) + toVisit.pushAll( + IntraProcBlockIRCursor + .succ(next) + .diff(visited.collect[Block] { case b: Block => + b + }) + ) + + def contentStr(b: CFGPosition) = { + if result.contains(b) then "\n :: " + result(b) + else "" + } + + val t = next match + case p: Procedure => s"\nProcedure ${p.name}" + case b: Block => + Seq( + s" Block ${b.label}${contentStr(b)}", + b.statements + .map(s => { + " " + s.toString + contentStr(s) + }) + .mkString("\n"), + " " + b.jump.toString + contentStr(b.jump) + ).mkString("\n") + case s: Statement => s" Statement $s${contentStr(s)}" + case s: Jump => s" Jump $s${contentStr(s)}" + results.addOne(t) + } + results.mkString(System.lineSeparator()) + } +} diff --git a/src/main/scala/util/RunUtils.scala b/src/main/scala/util/RunUtils.scala index f459737448..644ae0618c 100644 --- a/src/main/scala/util/RunUtils.scala +++ b/src/main/scala/util/RunUtils.scala @@ -220,305 +220,6 @@ object IRLoading { } } -/** Methods relating to program static analysis. - */ -object StaticAnalysis { - - /** Run all static analysis passes on the provided IRProgram. 
- */ - def analyse( - ictx: IRContext, - config: StaticAnalysisConfig, - iteration: Int, - previousResults: Option[StaticAnalysisContext] = None - ): StaticAnalysisContext = { - var ctx = ictx - val IRProgram: Program = ctx.program - val externalFunctions: Set[ExternalFunction] = ctx.externalFunctions - val globals: Set[SpecGlobal] = ctx.globals - val globalOffsets: Map[BigInt, BigInt] = ctx.globalOffsets - - assert(invariant.singleCallBlockEnd(ctx.program)) - assert(invariant.cfgCorrect(ctx.program)) - assert(invariant.blocksUniqueToEachProcedure(ctx.program)) - assert(invariant.correctCalls(ctx.program)) - - val subroutines = IRProgram.procedures - .filter(p => p.address.isDefined) - .map(p => p.address.get -> p.name) - .toMap - val globalAddresses = globals.map(s => s.address -> s.name).toMap - val globalSizes = globals.map(s => s.name -> s.size).toMap - val externalAddresses = externalFunctions.map(e => e.offset -> e.name).toMap - StaticAnalysisLogger.debug("Globals:") - StaticAnalysisLogger.debug(globalAddresses) - StaticAnalysisLogger.debug("Global Offsets: ") - StaticAnalysisLogger.debug(globalOffsets) - StaticAnalysisLogger.debug("Global Sizes: ") - StaticAnalysisLogger.debug(globalSizes) - StaticAnalysisLogger.debug("External: ") - StaticAnalysisLogger.debug(externalAddresses) - StaticAnalysisLogger.debug("Subroutine Addresses:") - StaticAnalysisLogger.debug(subroutines) - - StaticAnalysisLogger.debug("reducible loops") - // reducible loops - if (config.irreducibleLoops) { - val foundLoops = LoopDetector.identify_loops(IRProgram) - foundLoops.irreducibleLoops.foreach(l => StaticAnalysisLogger.debug(s"Irreducible loop found: ${l.name}")) - - val newLoops = foundLoops.reducibleTransformIR().identifiedLoops - newLoops.foreach(l => StaticAnalysisLogger.debug(s"Loop found: ${l.name}")) - - foundLoops.updateIrWithLoops() - - config.analysisDotPath.foreach { s => - AnalysisResultDotLogger.writeToFile( - File(s"${s}_graph-after-loop-reduce-$iteration.dot"), - dotBlockGraph(IRProgram, IRProgram.map(b => b -> b.toString).toMap) - ) - AnalysisResultDotLogger.writeToFile( - File(s"${s}_blockgraph-after-loop-reduce-$iteration.dot"), - dotBlockGraph(IRProgram, IRProgram.filter(_.isInstanceOf[Block]).map(b => b -> b.toString).toMap) - ) - } - } - - val mergedSubroutines = subroutines ++ externalAddresses - - val domain = computeDomain(IntraProcIRCursor, IRProgram.procedures) - val interDomain = computeDomain(InterProcIRCursor, IRProgram.procedures) - - StaticAnalysisLogger.debug("[!] Running ANR") - val ANRSolver = ANRAnalysisSolver(IRProgram) - val ANRResult = ANRSolver.analyze() - - StaticAnalysisLogger.debug("[!] Running RNA") - val RNASolver = RNAAnalysisSolver(IRProgram) - val RNAResult = RNASolver.analyze() - - StaticAnalysisLogger.debug("[!] 
Running Inter-procedural Constant Propagation") - val interProcConstProp = InterProcConstantPropagation(IRProgram) - val interProcConstPropResult: Map[CFGPosition, Map[Variable, FlatElement[BitVecLiteral]]] = - interProcConstProp.analyze() - - config.analysisResultsPath.foreach { s => - DebugDumpIRLogger.writeToFile( - File(s"${s}OGconstprop$iteration.txt"), - printAnalysisResults(IRProgram, interProcConstPropResult) - ) - } - - val intraProcConstProp = IntraProcConstantPropagation(IRProgram) - val intraProcConstPropResult: Map[CFGPosition, Map[Variable, FlatElement[BitVecLiteral]]] = - intraProcConstProp.analyze() - - config.analysisResultsPath.foreach { s => - DebugDumpIRLogger.writeToFile( - File(s"${s}_new_ir_constprop$iteration.txt"), - printAnalysisResults(IRProgram, intraProcConstPropResult) - ) - } - - config.analysisDotPath.foreach { f => - val dumpdomain = computeDomain[CFGPosition, CFGPosition](InterProcIRCursor, IRProgram.procedures) - AnalysisResultDotLogger.writeToFile( - File(s"${f}_new_ir_intercfg$iteration.dot"), - toDot(dumpdomain.toSet, InterProcIRCursor, Map.empty, Set()) - ) - } - - val reachingDefinitionsAnalysisSolver = InterprocReachingDefinitionsAnalysisSolver(IRProgram) - val reachingDefinitionsAnalysisResults = reachingDefinitionsAnalysisSolver.analyze() - - config.analysisDotPath.foreach { s => - AnalysisResultDotLogger.writeToFile( - File(s"${s}_reachingDefinitions$iteration.dot"), - toDot( - IRProgram, - IRProgram.filter(_.isInstanceOf[Command]).map(b => b -> reachingDefinitionsAnalysisResults(b).toString).toMap, - true - ) - ) - } - - Logger.debug("[!] Running Writes To") - val writesTo = WriteToAnalysis(ctx.program).analyze() - - Logger.debug("[!] Running commondef variable renaming (Intra SSA)") - val SSAResults = getCommonDefinitionVariableRenaming(IRProgram, writesTo) - - config.analysisDotPath.foreach(s => { - writeToFile( - toDot(IRProgram, IRProgram.filter(_.isInstanceOf[Command]).map(b => b -> SSAResults(b).toString).toMap, true), - s"${s}_SSA$iteration.dot" - ) - }) - - val mmm = MemoryModelMap(globalOffsets, mergedSubroutines, globalAddresses, globalSizes) - mmm.preLoadGlobals() - - val previousVSAResults = if (previousResults.isDefined) { - previousResults.get.vsaResult - } else { - Map[CFGPosition, LiftedElement[Map[Variable | MemoryRegion, Set[Value]]]]() - } - - StaticAnalysisLogger.debug("[!] Running GRA") - val graSolver = GlobalRegionAnalysisSolver( - IRProgram, - domain.toSet, - interProcConstPropResult, - reachingDefinitionsAnalysisResults, - mmm, - previousVSAResults - ) - val graResult = graSolver.analyze() - - StaticAnalysisLogger.debug("[!] 
Running MRA") - val mraSolver = MemoryRegionAnalysisSolver( - IRProgram, - domain.toSet, - interProcConstPropResult, - reachingDefinitionsAnalysisResults, - graResult, - mmm, - previousVSAResults - ) - val mraResult = mraSolver.analyze() - - config.analysisDotPath.foreach { s => - AnalysisResultDotLogger.writeToFile(File(s"${s}_callgraph$iteration.dot"), dotCallGraph(IRProgram)) - AnalysisResultDotLogger.writeToFile( - File(s"${s}_blockgraph$iteration.dot"), - dotBlockGraph(IRProgram, IRProgram.filter(_.isInstanceOf[Block]).map(b => b -> b.toString).toMap) - ) - - AnalysisResultDotLogger.writeToFile( - File(s"${s}_new_ir_constprop$iteration.dot"), - toDot( - IRProgram, - IRProgram.filter(_.isInstanceOf[Command]).map(b => b -> intraProcConstPropResult(b).toString).toMap - ) - ) - - writeToFile( - toDot(IRProgram, IRProgram.filter(_.isInstanceOf[Command]).map(b => b -> ANRResult(b).toString).toMap), - s"${s}_ANR$iteration.dot" - ) - - writeToFile( - toDot(IRProgram, IRProgram.filter(_.isInstanceOf[Command]).map(b => b -> RNAResult(b).toString).toMap), - s"${s}_RNA$iteration.dot" - ) - - writeToFile( - toDot(IRProgram, IRProgram.filter(_.isInstanceOf[Command]).map(b => b -> mraResult(b).toString).toMap), - s"${s}_MRA$iteration.dot" - ) - - AnalysisResultDotLogger.writeToFile( - File(s"${s}_GRA$iteration.dot"), - toDot(IRProgram, IRProgram.filter(_.isInstanceOf[Command]).map(b => b -> graResult(b).toString).toMap) - ) - } - - StaticAnalysisLogger.debug("[!] Running MMM") - mmm.convertMemoryRegions( - mraSolver.procedureToStackRegions, - mraSolver.procedureToHeapRegions, - mraResult, - mraSolver.procedureToSharedRegions, - graSolver.getDataMap, - graResult - ) - mmm.logRegions() - - Logger.debug("[!] Running VSA") - val vsaSolver = ValueSetAnalysisSolver(IRProgram, mmm) - val vsaResult: Map[CFGPosition, LiftedElement[Map[Variable | MemoryRegion, Set[Value]]]] = vsaSolver.analyze() - - mmm.postLoadVSARelations(vsaResult, ANRResult, RNAResult) - - config.analysisDotPath.foreach { s => - AnalysisResultDotLogger.writeToFile( - File(s"${s}_VSA$iteration.dot"), - toDot(IRProgram, IRProgram.filter(_.isInstanceOf[Command]).map(b => b -> vsaResult(b).toString).toMap) - ) - } - - Logger.debug("[!] 
Running Steensgaard") - val steensgaardSolver = InterprocSteensgaardAnalysis(interDomain.toSet, mmm, SSAResults) - steensgaardSolver.analyze() - val steensgaardResults = steensgaardSolver.pointsTo() - - mmm.setCallSiteSummaries(steensgaardSolver.callSiteSummary) - - val paramResults: Map[Procedure, Set[Variable]] = ParamAnalysis(IRProgram).analyze() - val interLiveVarsResults: Map[CFGPosition, Map[Variable, TwoElement]] = InterLiveVarsAnalysis(IRProgram).analyze() - - StaticAnalysisContext( - intraProcConstProp = intraProcConstPropResult, - interProcConstProp = interProcConstPropResult, - memoryRegionResult = mraResult, - vsaResult = vsaResult, - interLiveVarsResults = interLiveVarsResults, - paramResults = paramResults, - steensgaardResults = steensgaardResults, - mmmResults = mmm, - symbolicAddresses = Map.empty, - reachingDefs = reachingDefinitionsAnalysisResults, - regionInjector = None, - localDSA = Map.empty, - bottomUpDSA = Map.empty, - topDownDSA = Map.empty, - writesToResult = writesTo, - ssaResults = SSAResults - ) - } - - def printAnalysisResults(prog: Program, result: Map[CFGPosition, _]): String = { - val results = mutable.ArrayBuffer[String]() - val toVisit = mutable.Stack[CFGPosition]() - val visited = mutable.HashSet[CFGPosition]() - toVisit.pushAll(prog.procedures) - - while (toVisit.nonEmpty) { - val next = toVisit.pop() - visited.add(next) - toVisit.pushAll( - IntraProcBlockIRCursor - .succ(next) - .diff(visited.collect[Block] { case b: Block => - b - }) - ) - - def contentStr(b: CFGPosition) = { - if result.contains(b) then "\n :: " + result(b) - else "" - } - - val t = next match - case p: Procedure => s"\nProcedure ${p.name}" - case b: Block => - Seq( - s" Block ${b.label}${contentStr(b)}", - b.statements - .map(s => { - " " + s.toString + contentStr(s) - }) - .mkString("\n"), - " " + b.jump.toString + contentStr(b.jump) - ).mkString("\n") - case s: Statement => s" Statement $s${contentStr(s)}" - case s: Jump => s" Jump $s${contentStr(s)}" - results.addOne(t) - } - results.mkString(System.lineSeparator()) - } -} - object RunUtils { def run(q: BASILConfig): BASILResult = { @@ -878,7 +579,7 @@ object RunUtils { val analysisResult = mutable.ArrayBuffer[StaticAnalysisContext]() while (modified) { Logger.debug("[!] Running Static Analysis") - val result = StaticAnalysis.analyse(ctx, config, iteration, analysisResult.lastOption) + val result = analysis.AnalysisPipelineMRA.analyse(ctx, config, iteration, analysisResult.lastOption) val previousResult = analysisResult.lastOption analysisResult.append(result) StaticAnalysisLogger.info("[!] 
Replacing Indirect Calls") diff --git a/src/test/scala/PointsToTest.scala b/src/test/scala/PointsToTest.scala index cfcf725a8b..fed2128af2 100644 --- a/src/test/scala/PointsToTest.scala +++ b/src/test/scala/PointsToTest.scala @@ -6,7 +6,7 @@ import org.scalatest.* import org.scalatest.funsuite.* import specification.* import test_util.CaptureOutput -import util.{IRContext, StaticAnalysis, StaticAnalysisConfig, StaticAnalysisContext} +import util.{IRContext, StaticAnalysisConfig, StaticAnalysisContext} @test_util.tags.DisabledTest class PointsToTest extends AnyFunSuite with CaptureOutput with OneInstancePerTest { @@ -28,7 +28,7 @@ class PointsToTest extends AnyFunSuite with CaptureOutput with OneInstancePerTes Specification(Set(), Set(), Map(), List(), List(), List(), Set()), program ) - StaticAnalysis.analyse(ctx, StaticAnalysisConfig(), 1) + analysis.AnalysisPipelineMRA.analyse(ctx, StaticAnalysisConfig(), 1) } /** Test that the analysis correctly identifies the stack pointer even when it is aliased From 35730ff42bcc07dbce78cb9333b7b3904aef51d7 Mon Sep 17 00:00:00 2001 From: Alistair Michael Date: Thu, 3 Jul 2025 17:33:41 +1000 Subject: [PATCH 21/30] move irloader and analyses to separate file --- .../scala/analysis/AnalysePipelineMRA.scala | 2 - src/main/scala/analysis/Legacy.scala | 26 +++ .../DataStructureAnalysis.scala | 9 + .../data_structure_analysis/IntervalDSA.scala | 2 +- src/main/scala/ir/IRLoading.scala | 165 ++++++++++++++++ src/main/scala/ir/eval/InterpretBasilIR.scala | 2 +- .../scala/ir/eval/InterpretBreakpoints.scala | 2 +- src/main/scala/ir/eval/InterpretRLimit.scala | 3 +- src/main/scala/ir/eval/InterpretTrace.scala | 3 +- src/main/scala/ir/parsing/Attrib.scala | 2 +- .../ir/parsing/BasilMainBNFCVisitor.scala | 10 +- .../scala/ir/transforms/LinuxAssertFail.scala | 3 +- .../ir/transforms/ProcedureParameters.scala | 2 +- src/main/scala/ir/transforms/Transform.scala | 4 +- .../scala/translating/PrettyPrinter.scala | 8 +- src/main/scala/util/BASILConfig.scala | 2 + src/main/scala/util/RunUtils.scala | 183 +----------------- src/test/scala/ConditionLiftingTests.scala | 2 +- .../scala/DataStructureAnalysisTest.scala | 12 +- src/test/scala/DifferentialAnalysisTest.scala | 4 +- src/test/scala/IndirectCallTests.scala | 4 +- src/test/scala/InterpretTestConstProp.scala | 4 +- src/test/scala/IntervalDSATest.scala | 2 +- src/test/scala/IrreducibleLoop.scala | 4 +- src/test/scala/MemoryTransformTests.scala | 1 + src/test/scala/PCTrackingTest.scala | 4 +- src/test/scala/PointsToTest.scala | 5 +- src/test/scala/RemovePCTest.scala | 2 +- src/test/scala/SVATest.scala | 1 + src/test/scala/TestKnownBitsInterpreter.scala | 2 +- src/test/scala/ir/InterpreterTests.scala | 2 +- src/test/scala/test_util/BASILTest.scala | 2 +- src/test/scala/test_util/Context.scala | 3 +- .../TestValueDomainWithInterpreter.scala | 3 +- 34 files changed, 247 insertions(+), 238 deletions(-) create mode 100644 src/main/scala/analysis/Legacy.scala create mode 100644 src/main/scala/ir/IRLoading.scala diff --git a/src/main/scala/analysis/AnalysePipelineMRA.scala b/src/main/scala/analysis/AnalysePipelineMRA.scala index 615ba1290d..810f7deb80 100644 --- a/src/main/scala/analysis/AnalysePipelineMRA.scala +++ b/src/main/scala/analysis/AnalysePipelineMRA.scala @@ -7,10 +7,8 @@ import specification.* import util.{ AnalysisResultDotLogger, DebugDumpIRLogger, - IRContext, Logger, StaticAnalysisConfig, - StaticAnalysisContext, StaticAnalysisLogger, writeToFile } diff --git a/src/main/scala/analysis/Legacy.scala 
b/src/main/scala/analysis/Legacy.scala new file mode 100644 index 0000000000..5162488423 --- /dev/null +++ b/src/main/scala/analysis/Legacy.scala @@ -0,0 +1,26 @@ +package analysis + +import analysis.Interval as _ +import analysis.data_structure_analysis.* +import ir.* + +import scala.jdk.CollectionConverters.* + +case class StaticAnalysisContext( + intraProcConstProp: Map[CFGPosition, Map[Variable, FlatElement[BitVecLiteral]]], + interProcConstProp: Map[CFGPosition, Map[Variable, FlatElement[BitVecLiteral]]], + memoryRegionResult: Map[CFGPosition, ((Set[StackRegion], Set[Variable]), Set[HeapRegion])], + vsaResult: Map[CFGPosition, LiftedElement[Map[Variable | MemoryRegion, Set[Value]]]], + interLiveVarsResults: Map[CFGPosition, Map[Variable, TwoElement]], + paramResults: Map[Procedure, Set[Variable]], + steensgaardResults: Map[RegisterWrapperEqualSets, Set[RegisterWrapperEqualSets | MemoryRegion]], + mmmResults: MemoryModelMap, + reachingDefs: Map[CFGPosition, (Map[Variable, Set[Assign]], Map[Variable, Set[Assign]])], + regionInjector: Option[RegionInjector], + symbolicAddresses: Map[CFGPosition, Map[SymbolicAddress, TwoElement]], + localDSA: Map[Procedure, Graph], + bottomUpDSA: Map[Procedure, Graph], + topDownDSA: Map[Procedure, Graph], + writesToResult: Map[Procedure, Set[GlobalVar]], + ssaResults: Map[CFGPosition, (Map[Variable, FlatElement[Int]], Map[Variable, FlatElement[Int]])] +) diff --git a/src/main/scala/analysis/data_structure_analysis/DataStructureAnalysis.scala b/src/main/scala/analysis/data_structure_analysis/DataStructureAnalysis.scala index c9981a1f15..f72094814b 100644 --- a/src/main/scala/analysis/data_structure_analysis/DataStructureAnalysis.scala +++ b/src/main/scala/analysis/data_structure_analysis/DataStructureAnalysis.scala @@ -8,6 +8,15 @@ import util.assertion.* import scala.collection.mutable +case class DSAContext( + sva: Map[Procedure, SymValues[DSInterval]], + constraints: Map[Procedure, Set[Constraint]], + local: Map[Procedure, IntervalGraph], + bottomUp: Map[Procedure, IntervalGraph], + topDown: Map[Procedure, IntervalGraph], + globals: Map[IntervalNode, IntervalNode] +) + /** Data Structure Analysis Performs all phases of DSA and stores the results in member variables local, bottom-up, * top-down results in member variables locals, bu and td respectively. 
* @param program diff --git a/src/main/scala/analysis/data_structure_analysis/IntervalDSA.scala b/src/main/scala/analysis/data_structure_analysis/IntervalDSA.scala index aab24b825c..9d69693145 100644 --- a/src/main/scala/analysis/data_structure_analysis/IntervalDSA.scala +++ b/src/main/scala/analysis/data_structure_analysis/IntervalDSA.scala @@ -12,7 +12,7 @@ import specification.{ExternalFunction, FuncEntry, SymbolTableEntry} import util.DSAConfig.{Checks, Standard} import util.LogLevel.INFO import util.assertion.* -import util.{DSAConfig, DSAContext, DSALogger, IRContext, PerformanceTimer} +import util.{DSAConfig, DSALogger, PerformanceTimer} import scala.collection.mutable import scala.collection.mutable.ArrayBuffer diff --git a/src/main/scala/ir/IRLoading.scala b/src/main/scala/ir/IRLoading.scala new file mode 100644 index 0000000000..ba53fee02c --- /dev/null +++ b/src/main/scala/ir/IRLoading.scala @@ -0,0 +1,165 @@ +package ir + +import Parsers.* +import analysis.Interval as _ +import bap.* +import boogie.* +import com.grammatech.gtirb.proto.IR.IR +import gtirb.* +import ir.* +import org.antlr.v4.runtime.{BailErrorStrategy, CharStreams, CommonTokenStream} +import specification.* +import translating.* +import util.{ILLoadingConfig, Logger} + +import java.io.FileInputStream +import scala.jdk.CollectionConverters.* + +enum FrontendMode { + case Bap + case Gtirb + case Basil +} + +/** Stores the IR Program loaded from the binary and ELF tables, which is modified during analysis and program + * transformation. + */ +case class IRContext( + symbols: List[ELFSymbol], + externalFunctions: Set[ExternalFunction], + globals: Set[SpecGlobal], + funcEntries: Set[FuncEntry], + globalOffsets: Map[BigInt, BigInt], + specification: Specification, + program: Program // internally mutable +) + +object IRLoading { + + /** Create a context from just an IR program. + */ + def load(p: Program): IRContext = { + IRContext( + List.empty, + Set.empty, + Set.empty, + Set.empty, + Map.empty, + IRLoading.loadSpecification(None, p, Set.empty), + p + ) + } + + /** Load a program from files using the provided configuration. 
+ */ + def load(q: ILLoadingConfig): IRContext = { + + val mode = if q.inputFile.endsWith(".gts") then { + FrontendMode.Gtirb + } else if q.inputFile.endsWith(".adt") then { + FrontendMode.Bap + } else if (q.inputFile.endsWith(".il")) { + FrontendMode.Basil + } else { + throw Exception(s"input file name ${q.inputFile} must be an .adt or .gts file") + } + + val (mainAddress, makeContext) = q.relfFile match { + case Some(relf) => { + // TODO: this tuple is large, should be a case class + val (symbols, externalFunctions, globals, funcEntries, globalOffsets, mainAddress) = + IRLoading.loadReadELF(relf, q) + + def continuation(ctx: IRContext) = + val specification = IRLoading.loadSpecification(q.specFile, ctx.program, globals) + IRContext(symbols, externalFunctions, globals, funcEntries, globalOffsets, specification, ctx.program) + + (Some(mainAddress), continuation) + } + case None if mode == FrontendMode.Gtirb => { + Logger.warn("RELF not provided, recommended for GTIRB input") + (None, (x: IRContext) => x) + } + case None => { + (None, (x: IRContext) => x) + } + } + + val program: IRContext = (mode, mainAddress) match { + case (FrontendMode.Gtirb, _) => IRLoading.load(loadGTIRB(q.inputFile, mainAddress, Some(q.mainProcedureName))) + case (FrontendMode.Basil, _) => ir.parsing.ParseBasilIL.loadILFile(q.inputFile) + case (FrontendMode.Bap, None) => throw Exception("relf is required when using BAP input") + case (FrontendMode.Bap, Some(mainAddress)) => { + val bapProgram = loadBAP(q.inputFile) + IRLoading.load(BAPToIR(bapProgram, mainAddress).translate) + } + } + + val ctx = makeContext(program) + mode match { + case FrontendMode.Basil => { + ctx.program.procedures.foreach(_.updateBlockSuffix()) + Logger.info("[!] Disabling PC tracking transforms due to IL input") + } + case _ => { + ir.transforms.PCTracking.applyPCTracking(q.pcTracking, ctx.program) + ctx.program.procedures.foreach(_.normaliseBlockNames()) + } + } + ctx + } + + def loadBAP(fileName: String): BAPProgram = { + val ADTLexer = BAP_ADTLexer(CharStreams.fromFileName(fileName)) + val tokens = CommonTokenStream(ADTLexer) + val parser = BAP_ADTParser(tokens) + + parser.setBuildParseTree(true) + + BAPLoader().visitProject(parser.project()) + } + + def loadGTIRB(fileName: String, mainAddress: Option[BigInt], mainName: Option[String] = None): Program = { + val fIn = FileInputStream(fileName) + val ir = IR.parseFrom(fIn) + val mods = ir.modules + val cfg = ir.cfg.get + + val semanticsJson = mods.map(_.auxData("ast").data.toStringUtf8) + + val semantics = semanticsJson.map(upickle.default.read[Map[String, List[InsnSemantics]]](_)) + + val parserMap: Map[String, List[InsnSemantics]] = semantics.flatten.toMap + + val GTIRBConverter = GTIRBToIR(mods, parserMap, cfg, mainAddress, mainName) + GTIRBConverter.createIR() + } + + def loadReadELF( + fileName: String, + config: ILLoadingConfig + ): (List[ELFSymbol], Set[ExternalFunction], Set[SpecGlobal], Set[FuncEntry], Map[BigInt, BigInt], BigInt) = { + val lexer = ReadELFLexer(CharStreams.fromFileName(fileName)) + val tokens = CommonTokenStream(lexer) + val parser = ReadELFParser(tokens) + parser.setErrorHandler(BailErrorStrategy()) + parser.setBuildParseTree(true) + ReadELFLoader.visitSyms(parser.syms(), config) + } + + def emptySpecification(globals: Set[SpecGlobal]) = + Specification(Set(), globals, Map(), List(), List(), List(), Set()) + + def loadSpecification(filename: Option[String], program: Program, globals: Set[SpecGlobal]): Specification = { + filename match { + case Some(s) => + val 
specLexer = SpecificationsLexer(CharStreams.fromFileName(s)) + val specTokens = CommonTokenStream(specLexer) + val specParser = SpecificationsParser(specTokens) + specParser.setBuildParseTree(true) + val specLoader = SpecificationLoader(globals, program) + specLoader.visitSpecification(specParser.specification()) + case None => emptySpecification(globals) + } + } +} diff --git a/src/main/scala/ir/eval/InterpretBasilIR.scala b/src/main/scala/ir/eval/InterpretBasilIR.scala index aa7a1afc72..e6a931b97a 100644 --- a/src/main/scala/ir/eval/InterpretBasilIR.scala +++ b/src/main/scala/ir/eval/InterpretBasilIR.scala @@ -1,9 +1,9 @@ package ir.eval import boogie.Scope import ir.* +import util.Logger import util.functional.* import util.functional.State.* -import util.{IRContext, Logger} import scala.collection.immutable diff --git a/src/main/scala/ir/eval/InterpretBreakpoints.scala b/src/main/scala/ir/eval/InterpretBreakpoints.scala index 70efcb98ad..ad05fc4c6b 100644 --- a/src/main/scala/ir/eval/InterpretBreakpoints.scala +++ b/src/main/scala/ir/eval/InterpretBreakpoints.scala @@ -1,9 +1,9 @@ package ir.eval import ir.* +import util.Logger import util.functional.* import util.functional.State.* -import util.{IRContext, Logger} import scala.collection.immutable diff --git a/src/main/scala/ir/eval/InterpretRLimit.scala b/src/main/scala/ir/eval/InterpretRLimit.scala index 3df7c793b1..29d21adb76 100644 --- a/src/main/scala/ir/eval/InterpretRLimit.scala +++ b/src/main/scala/ir/eval/InterpretRLimit.scala @@ -1,7 +1,6 @@ package ir.eval -import ir.* -import util.IRContext +import ir.{IRContext, *} import util.functional.* import util.functional.State.* diff --git a/src/main/scala/ir/eval/InterpretTrace.scala b/src/main/scala/ir/eval/InterpretTrace.scala index de4a9e21b0..33c0b00125 100644 --- a/src/main/scala/ir/eval/InterpretTrace.scala +++ b/src/main/scala/ir/eval/InterpretTrace.scala @@ -1,8 +1,7 @@ package ir.eval import boogie.Scope -import ir.* -import util.IRContext +import ir.{IRContext, *} import util.functional.* import util.functional.State.* diff --git a/src/main/scala/ir/parsing/Attrib.scala b/src/main/scala/ir/parsing/Attrib.scala index a072b0309d..5e13cc3e95 100644 --- a/src/main/scala/ir/parsing/Attrib.scala +++ b/src/main/scala/ir/parsing/Attrib.scala @@ -241,7 +241,7 @@ case class SymbolTableInfo( } object SymbolTableInfo { - def from(e: util.IRContext) = { + def from(e: ir.IRContext) = { SymbolTableInfo(e.externalFunctions, e.globals, e.funcEntries, e.globalOffsets) } diff --git a/src/main/scala/ir/parsing/BasilMainBNFCVisitor.scala b/src/main/scala/ir/parsing/BasilMainBNFCVisitor.scala index 48e66401c8..af0474b0c4 100644 --- a/src/main/scala/ir/parsing/BasilMainBNFCVisitor.scala +++ b/src/main/scala/ir/parsing/BasilMainBNFCVisitor.scala @@ -294,7 +294,7 @@ case class BasilMainBNFCVisitor[A]( val makeVisitor: (String, Declarations) => InnerBasilBNFCVisitor[A] = InnerBasilBNFCVisitor[A](_, _) ) extends LiteralsBNFCVisitor[A] with TypesBNFCVisitor[A] - with syntax.Module.Visitor[util.IRContext, A] + with syntax.Module.Visitor[ir.IRContext, A] with syntax.Declaration.Visitor[ir.dsl.EventuallyProcedure, A] with syntax.Params.Visitor[ir.LocalVar, A] with syntax.ProcSig.Visitor[ir.dsl.EventuallyProcedure, A] @@ -451,13 +451,13 @@ case class BasilMainBNFCVisitor[A]( } val resolvedProg = ir.dsl.EventuallyProgram(mainProcDef.head, otherProcs, initialMemory).resolve - util.IRContext( + ir.IRContext( List(), decls.symtab.externalFunctions, decls.symtab.globals, decls.symtab.funcEntries, 
decls.symtab.globalOffsets, - util.IRLoading.emptySpecification(decls.symtab.globals), + ir.IRLoading.emptySpecification(decls.symtab.globals), resolvedProg ) } @@ -486,12 +486,12 @@ object ParseBasilIL { result } - def loadILFile(filePath: String): util.IRContext = { + def loadILFile(filePath: String): ir.IRContext = { val reader = new FileReader(filePath) loadILReader(reader) } - def loadILString(text: String): util.IRContext = { + def loadILString(text: String): ir.IRContext = { val reader = new StringReader(text) loadILReader(reader) } diff --git a/src/main/scala/ir/transforms/LinuxAssertFail.scala b/src/main/scala/ir/transforms/LinuxAssertFail.scala index b8f41a3941..8ee2ed84f7 100644 --- a/src/main/scala/ir/transforms/LinuxAssertFail.scala +++ b/src/main/scala/ir/transforms/LinuxAssertFail.scala @@ -1,9 +1,8 @@ package ir.transforms -import ir.* import ir.cilvisitor.* import ir.eval.* -import util.IRContext +import ir.{IRContext, *} import util.functional.* def liftLinuxAssertFail(ctx: IRContext) = { diff --git a/src/main/scala/ir/transforms/ProcedureParameters.scala b/src/main/scala/ir/transforms/ProcedureParameters.scala index b246527697..d5c7b87a99 100644 --- a/src/main/scala/ir/transforms/ProcedureParameters.scala +++ b/src/main/scala/ir/transforms/ProcedureParameters.scala @@ -111,7 +111,7 @@ object DefinedOnAllPaths { } } -def liftProcedureCallAbstraction(ctx: util.IRContext): util.IRContext = { +def liftProcedureCallAbstraction(ctx: ir.IRContext): ir.IRContext = { transforms.clearParams(ctx.program) diff --git a/src/main/scala/ir/transforms/Transform.scala b/src/main/scala/ir/transforms/Transform.scala index 259cee82cb..9dcf151b47 100644 --- a/src/main/scala/ir/transforms/Transform.scala +++ b/src/main/scala/ir/transforms/Transform.scala @@ -1,9 +1,9 @@ package ir.transforms import analysis.AnalysisManager -import ir.dotBlockGraph +import ir.{IRContext, dotBlockGraph} import translating.PrettyPrinter.pp_prog -import util.{DebugDumpIRLogger, IRContext, Logger, PerformanceTimer} +import util.{DebugDumpIRLogger, Logger, PerformanceTimer} import java.io.File diff --git a/src/main/scala/translating/PrettyPrinter.scala b/src/main/scala/translating/PrettyPrinter.scala index 16b7111efc..d785155ec4 100644 --- a/src/main/scala/translating/PrettyPrinter.scala +++ b/src/main/scala/translating/PrettyPrinter.scala @@ -9,7 +9,7 @@ private val localSigils = false object PrettyPrinter { - type PrettyPrintable = Program | Procedure | Statement | Jump | Command | Block | Expr | util.IRContext + type PrettyPrintable = Program | Procedure | Statement | Jump | Command | Block | Expr | IRContext extension (b: BigInt) { def pprint: String = "0x%x".format(b) @@ -22,10 +22,10 @@ object PrettyPrinter { case e: Block => pp_block(e) case e: Procedure => pp_proc(e) case e: Program => pp_prog(e) - case e: util.IRContext => pp_irctx(e) + case e: IRContext => pp_irctx(e) } - def pp_irctx(e: util.IRContext) = BasilIRPrettyPrinter().vcontext(e).toString + def pp_irctx(e: IRContext) = BasilIRPrettyPrinter().vcontext(e).toString def pp_expr(e: Expr) = BasilIRPrettyPrinter()(e) def pp_stmt(s: Statement) = BasilIRPrettyPrinter()(s) def pp_cmd(c: Command) = c match { @@ -228,7 +228,7 @@ class BasilIRPrettyPrinter( }.toSet } - def vcontext(i: util.IRContext) = { + def vcontext(i: IRContext) = { val prog = vprog(i.program) import ir.parsing.Attrib import ir.parsing.MemoryAttribData diff --git a/src/main/scala/util/BASILConfig.scala b/src/main/scala/util/BASILConfig.scala index 6b02b806e8..0f9c2a1dea 100644 --- 
a/src/main/scala/util/BASILConfig.scala +++ b/src/main/scala/util/BASILConfig.scala @@ -1,5 +1,7 @@ package util +import ir.IRContext + enum ProcRelyVersion { case Function, IfCommandContradiction } diff --git a/src/main/scala/util/RunUtils.scala b/src/main/scala/util/RunUtils.scala index 644ae0618c..6e2b73563c 100644 --- a/src/main/scala/util/RunUtils.scala +++ b/src/main/scala/util/RunUtils.scala @@ -1,17 +1,12 @@ package util -import Parsers.* import analysis.data_structure_analysis.* import analysis.{AnalysisManager, Interval as _, *} -import bap.* import boogie.* -import com.grammatech.gtirb.proto.IR.IR -import gtirb.* import ir.* import ir.dsl.given import ir.eval.* import ir.transforms.* -import org.antlr.v4.runtime.{BailErrorStrategy, CharStreams, CommonTokenStream} import specification.* import translating.* import translating.PrettyPrinter.* @@ -19,7 +14,7 @@ import util.DSAConfig.Prereq import util.LogLevel.INFO import util.{DebugDumpIRLogger, Logger} -import java.io.{BufferedWriter, File, FileInputStream, FileWriter, PrintWriter} +import java.io.{BufferedWriter, File, FileWriter, PrintWriter} import java.nio.file.{Files, Paths} import scala.collection.mutable import scala.collection.mutable.ArrayBuffer @@ -30,55 +25,8 @@ import cilvisitor.* /** This file contains the main program execution. See RunUtils.loadAndTranslate for the high-level process. */ -/** Stores the IR Program loaded from the binary and ELF tables, which is modified during analysis and program - * transformation. - */ -case class IRContext( - symbols: List[ELFSymbol], - externalFunctions: Set[ExternalFunction], - globals: Set[SpecGlobal], - funcEntries: Set[FuncEntry], - globalOffsets: Map[BigInt, BigInt], - specification: Specification, - program: Program // internally mutable -) - -enum FrontendMode { - case Bap - case Gtirb - case Basil -} - /** Stores the results of the static analyses. */ -case class StaticAnalysisContext( - intraProcConstProp: Map[CFGPosition, Map[Variable, FlatElement[BitVecLiteral]]], - interProcConstProp: Map[CFGPosition, Map[Variable, FlatElement[BitVecLiteral]]], - memoryRegionResult: Map[CFGPosition, ((Set[StackRegion], Set[Variable]), Set[HeapRegion])], - vsaResult: Map[CFGPosition, LiftedElement[Map[Variable | MemoryRegion, Set[Value]]]], - interLiveVarsResults: Map[CFGPosition, Map[Variable, TwoElement]], - paramResults: Map[Procedure, Set[Variable]], - steensgaardResults: Map[RegisterWrapperEqualSets, Set[RegisterWrapperEqualSets | MemoryRegion]], - mmmResults: MemoryModelMap, - reachingDefs: Map[CFGPosition, (Map[Variable, Set[Assign]], Map[Variable, Set[Assign]])], - regionInjector: Option[RegionInjector], - symbolicAddresses: Map[CFGPosition, Map[SymbolicAddress, TwoElement]], - localDSA: Map[Procedure, Graph], - bottomUpDSA: Map[Procedure, Graph], - topDownDSA: Map[Procedure, Graph], - writesToResult: Map[Procedure, Set[GlobalVar]], - ssaResults: Map[CFGPosition, (Map[Variable, FlatElement[Int]], Map[Variable, FlatElement[Int]])] -) - -case class DSAContext( - sva: Map[Procedure, SymValues[DSInterval]], - constraints: Map[Procedure, Set[Constraint]], - local: Map[Procedure, IntervalGraph], - bottomUp: Map[Procedure, IntervalGraph], - topDown: Map[Procedure, IntervalGraph], - globals: Map[IntervalNode, IntervalNode] -) - /** Results of the main program execution. */ case class BASILResult( @@ -90,135 +38,6 @@ case class BASILResult( /** Tools for loading the IR program into an IRContext. */ -object IRLoading { - - /** Create a context from just an IR program. 
- */ - def load(p: Program): IRContext = { - IRContext( - List.empty, - Set.empty, - Set.empty, - Set.empty, - Map.empty, - IRLoading.loadSpecification(None, p, Set.empty), - p - ) - } - - /** Load a program from files using the provided configuration. - */ - def load(q: ILLoadingConfig): IRContext = { - - val mode = if q.inputFile.endsWith(".gts") then { - FrontendMode.Gtirb - } else if q.inputFile.endsWith(".adt") then { - FrontendMode.Bap - } else if (q.inputFile.endsWith(".il")) { - FrontendMode.Basil - } else { - throw Exception(s"input file name ${q.inputFile} must be an .adt or .gts file") - } - - val (mainAddress, makeContext) = q.relfFile match { - case Some(relf) => { - // TODO: this tuple is large, should be a case class - val (symbols, externalFunctions, globals, funcEntries, globalOffsets, mainAddress) = - IRLoading.loadReadELF(relf, q) - - def continuation(ctx: IRContext) = - val specification = IRLoading.loadSpecification(q.specFile, ctx.program, globals) - IRContext(symbols, externalFunctions, globals, funcEntries, globalOffsets, specification, ctx.program) - - (Some(mainAddress), continuation) - } - case None if mode == FrontendMode.Gtirb => { - Logger.warn("RELF not provided, recommended for GTIRB input") - (None, (x: IRContext) => x) - } - case None => { - (None, (x: IRContext) => x) - } - } - - val program: IRContext = (mode, mainAddress) match { - case (FrontendMode.Gtirb, _) => IRLoading.load(loadGTIRB(q.inputFile, mainAddress, Some(q.mainProcedureName))) - case (FrontendMode.Basil, _) => ir.parsing.ParseBasilIL.loadILFile(q.inputFile) - case (FrontendMode.Bap, None) => throw Exception("relf is required when using BAP input") - case (FrontendMode.Bap, Some(mainAddress)) => { - val bapProgram = loadBAP(q.inputFile) - IRLoading.load(BAPToIR(bapProgram, mainAddress).translate) - } - } - - val ctx = makeContext(program) - mode match { - case FrontendMode.Basil => { - ctx.program.procedures.foreach(_.updateBlockSuffix()) - Logger.info("[!] 
Disabling PC tracking transforms due to IL input") - } - case _ => { - ir.transforms.PCTracking.applyPCTracking(q.pcTracking, ctx.program) - ctx.program.procedures.foreach(_.normaliseBlockNames()) - } - } - ctx - } - - def loadBAP(fileName: String): BAPProgram = { - val ADTLexer = BAP_ADTLexer(CharStreams.fromFileName(fileName)) - val tokens = CommonTokenStream(ADTLexer) - val parser = BAP_ADTParser(tokens) - - parser.setBuildParseTree(true) - - BAPLoader().visitProject(parser.project()) - } - - def loadGTIRB(fileName: String, mainAddress: Option[BigInt], mainName: Option[String] = None): Program = { - val fIn = FileInputStream(fileName) - val ir = IR.parseFrom(fIn) - val mods = ir.modules - val cfg = ir.cfg.get - - val semanticsJson = mods.map(_.auxData("ast").data.toStringUtf8) - - val semantics = semanticsJson.map(upickle.default.read[Map[String, List[InsnSemantics]]](_)) - - val parserMap: Map[String, List[InsnSemantics]] = semantics.flatten.toMap - - val GTIRBConverter = GTIRBToIR(mods, parserMap, cfg, mainAddress, mainName) - GTIRBConverter.createIR() - } - - def loadReadELF( - fileName: String, - config: ILLoadingConfig - ): (List[ELFSymbol], Set[ExternalFunction], Set[SpecGlobal], Set[FuncEntry], Map[BigInt, BigInt], BigInt) = { - val lexer = ReadELFLexer(CharStreams.fromFileName(fileName)) - val tokens = CommonTokenStream(lexer) - val parser = ReadELFParser(tokens) - parser.setErrorHandler(BailErrorStrategy()) - parser.setBuildParseTree(true) - ReadELFLoader.visitSyms(parser.syms(), config) - } - - def emptySpecification(globals: Set[SpecGlobal]) = - Specification(Set(), globals, Map(), List(), List(), List(), Set()) - - def loadSpecification(filename: Option[String], program: Program, globals: Set[SpecGlobal]): Specification = { - filename match { - case Some(s) => - val specLexer = SpecificationsLexer(CharStreams.fromFileName(s)) - val specTokens = CommonTokenStream(specLexer) - val specParser = SpecificationsParser(specTokens) - specParser.setBuildParseTree(true) - val specLoader = SpecificationLoader(globals, program) - specLoader.visitSpecification(specParser.specification()) - case None => emptySpecification(globals) - } - } -} object RunUtils { diff --git a/src/test/scala/ConditionLiftingTests.scala b/src/test/scala/ConditionLiftingTests.scala index 6ae8fdd185..2f4a152fe2 100644 --- a/src/test/scala/ConditionLiftingTests.scala +++ b/src/test/scala/ConditionLiftingTests.scala @@ -817,7 +817,7 @@ class ConditionLiftingRegressionTest extends AnyFunSuite with test_util.CaptureO test("conds inline test") { - var ctx = util.IRLoading.load(testProgram) + var ctx = ir.IRLoading.load(testProgram) ir.transforms.doCleanupWithSimplify(ctx, AnalysisManager(ctx.program)) ir.transforms.clearParams(ctx.program) diff --git a/src/test/scala/DataStructureAnalysisTest.scala b/src/test/scala/DataStructureAnalysisTest.scala index 4f4e03899e..d2a08d7462 100644 --- a/src/test/scala/DataStructureAnalysisTest.scala +++ b/src/test/scala/DataStructureAnalysisTest.scala @@ -1,3 +1,4 @@ +import analysis.StaticAnalysisContext import analysis.data_structure_analysis.* import boogie.SpecGlobal import ir.* @@ -6,16 +7,7 @@ import org.scalatest.funsuite.AnyFunSuite import specification.Specification import test_util.{BASILTest, CaptureOutput} import translating.PrettyPrinter.* -import util.{ - BASILConfig, - BASILResult, - BoogieGeneratorConfig, - ILLoadingConfig, - IRContext, - RunUtils, - StaticAnalysisConfig, - StaticAnalysisContext -} +import util.{BASILConfig, BASILResult, BoogieGeneratorConfig, 
ILLoadingConfig, RunUtils, StaticAnalysisConfig} /** This is the test suite for testing DSA functionality The tests follow a general pattern of running BASIL analyses on * a test program and then asserting properties about the Data Structure Graph (DSG) of the function produced at diff --git a/src/test/scala/DifferentialAnalysisTest.scala b/src/test/scala/DifferentialAnalysisTest.scala index 833c196fac..052bc23a3f 100644 --- a/src/test/scala/DifferentialAnalysisTest.scala +++ b/src/test/scala/DifferentialAnalysisTest.scala @@ -1,10 +1,10 @@ import analysis.AnalysisManager -import ir.* import ir.eval.{ExecEffect, *} +import ir.{IRContext, IRLoading, *} import org.scalatest.* import org.scalatest.funsuite.* import test_util.* -import util.{ILLoadingConfig, IRContext, IRLoading, LogLevel, Logger, RunUtils, StaticAnalysisConfig} +import util.{ILLoadingConfig, LogLevel, Logger, RunUtils, StaticAnalysisConfig} import java.io.File diff --git a/src/test/scala/IndirectCallTests.scala b/src/test/scala/IndirectCallTests.scala index bda42fbc83..30a0afec74 100644 --- a/src/test/scala/IndirectCallTests.scala +++ b/src/test/scala/IndirectCallTests.scala @@ -1,9 +1,9 @@ -import analysis.data_structure_analysis.* +import analysis.data_structure_analysis.{DSAContext, *} import ir.* import org.scalatest.funsuite.* import test_util.{BASILTest, CaptureOutput, TestConfig, TestCustomisation} import util.DSAConfig.Checks -import util.{BASILResult, DSAConfig, DSAContext, LogLevel, Logger, StaticAnalysisConfig} +import util.{BASILResult, DSAConfig, LogLevel, Logger, StaticAnalysisConfig} import scala.collection.mutable import scala.collection.mutable.ArrayBuffer diff --git a/src/test/scala/InterpretTestConstProp.scala b/src/test/scala/InterpretTestConstProp.scala index 4016c67962..cf738036c0 100644 --- a/src/test/scala/InterpretTestConstProp.scala +++ b/src/test/scala/InterpretTestConstProp.scala @@ -1,9 +1,9 @@ import analysis.* -import ir.* +import ir.{IRLoading, *} import org.scalatest.* import org.scalatest.funsuite.* import test_util.{BASILTest, CaptureOutput, TestValueDomainWithInterpreter} -import util.{ILLoadingConfig, IRLoading, LogLevel, Logger, RunUtils, StaticAnalysisConfig} +import util.{ILLoadingConfig, LogLevel, Logger, RunUtils, StaticAnalysisConfig} @test_util.tags.StandardSystemTest class InterpretTestConstProp diff --git a/src/test/scala/IntervalDSATest.scala b/src/test/scala/IntervalDSATest.scala index 77fb53f9fd..5300c5ad45 100644 --- a/src/test/scala/IntervalDSATest.scala +++ b/src/test/scala/IntervalDSATest.scala @@ -1,5 +1,5 @@ -import analysis.data_structure_analysis import analysis.data_structure_analysis.{DSInterval, Global, Heap, IntervalDSA, Par, Ret, Stack, SymBase} +import analysis.{StaticAnalysisContext, data_structure_analysis} import boogie.SpecGlobal import ir.Endian.LittleEndian import ir.dsl.{block, directCall, goto, proc, prog, ret} diff --git a/src/test/scala/IrreducibleLoop.scala b/src/test/scala/IrreducibleLoop.scala index 958cfbffe4..79765a1f00 100644 --- a/src/test/scala/IrreducibleLoop.scala +++ b/src/test/scala/IrreducibleLoop.scala @@ -1,9 +1,9 @@ import analysis.LoopDetector -import ir.{Block, Program, dotBlockGraph} +import ir.{Block, IRLoading, Program, dotBlockGraph} import org.scalatest.funsuite.AnyFunSuite import test_util.{BASILTest, CaptureOutput} import translating.BAPToIR -import util.{ILLoadingConfig, IRLoading, LogLevel, Logger} +import util.{ILLoadingConfig, LogLevel, Logger} import scala.sys.process.* diff --git a/src/test/scala/MemoryTransformTests.scala 
b/src/test/scala/MemoryTransformTests.scala index bc8c91d6d5..3203d7f09a 100644 --- a/src/test/scala/MemoryTransformTests.scala +++ b/src/test/scala/MemoryTransformTests.scala @@ -1,3 +1,4 @@ +import analysis.StaticAnalysisContext import boogie.SpecGlobal import ir.* import ir.Endian.LittleEndian diff --git a/src/test/scala/PCTrackingTest.scala b/src/test/scala/PCTrackingTest.scala index c31af649a9..9f65bd82a7 100644 --- a/src/test/scala/PCTrackingTest.scala +++ b/src/test/scala/PCTrackingTest.scala @@ -1,7 +1,7 @@ -import ir.* +import ir.{IRContext, *} import org.scalatest.funsuite.AnyFunSuite import test_util.{BASILTest, CaptureOutput} -import util.{BASILConfig, BoogieGeneratorConfig, ILLoadingConfig, IRContext, PCTrackingOption, StaticAnalysisConfig} +import util.{BASILConfig, BoogieGeneratorConfig, ILLoadingConfig, PCTrackingOption, StaticAnalysisConfig} @test_util.tags.UnitTest class PCTrackingTest extends AnyFunSuite with CaptureOutput { diff --git a/src/test/scala/PointsToTest.scala b/src/test/scala/PointsToTest.scala index fed2128af2..1bb1192713 100644 --- a/src/test/scala/PointsToTest.scala +++ b/src/test/scala/PointsToTest.scala @@ -1,12 +1,13 @@ +import analysis.StaticAnalysisContext import boogie.* -import ir.* import ir.Endian.LittleEndian import ir.dsl.* +import ir.{IRContext, *} import org.scalatest.* import org.scalatest.funsuite.* import specification.* import test_util.CaptureOutput -import util.{IRContext, StaticAnalysisConfig, StaticAnalysisContext} +import util.StaticAnalysisConfig @test_util.tags.DisabledTest class PointsToTest extends AnyFunSuite with CaptureOutput with OneInstancePerTest { diff --git a/src/test/scala/RemovePCTest.scala b/src/test/scala/RemovePCTest.scala index 944f33e0f9..82e1793a19 100644 --- a/src/test/scala/RemovePCTest.scala +++ b/src/test/scala/RemovePCTest.scala @@ -1,7 +1,7 @@ import ir.* import org.scalatest.funsuite.AnyFunSuite import test_util.{BASILTest, CaptureOutput} -import util.{BASILConfig, BoogieGeneratorConfig, ILLoadingConfig, IRContext, PCTrackingOption, StaticAnalysisConfig} +import util.{BASILConfig, BoogieGeneratorConfig, ILLoadingConfig, PCTrackingOption, StaticAnalysisConfig} @test_util.tags.UnitTest class RemovePCTest extends AnyFunSuite with CaptureOutput { diff --git a/src/test/scala/SVATest.scala b/src/test/scala/SVATest.scala index 72a1d93445..85933e4dc6 100644 --- a/src/test/scala/SVATest.scala +++ b/src/test/scala/SVATest.scala @@ -1,3 +1,4 @@ +import analysis.StaticAnalysisContext import analysis.data_structure_analysis.* import analysis.data_structure_analysis.given import boogie.SpecGlobal diff --git a/src/test/scala/TestKnownBitsInterpreter.scala b/src/test/scala/TestKnownBitsInterpreter.scala index 16d072d42d..556f482135 100644 --- a/src/test/scala/TestKnownBitsInterpreter.scala +++ b/src/test/scala/TestKnownBitsInterpreter.scala @@ -131,7 +131,7 @@ class TestKnownBitsInterpreter ) ) - val kbitsCtx = util.IRLoading.load(kbitsProg) + val kbitsCtx = ir.IRLoading.load(kbitsProg) def params(v1: BigInt, v2: BigInt) = Some( Seq( diff --git a/src/test/scala/ir/InterpreterTests.scala b/src/test/scala/ir/InterpreterTests.scala index 4fc54a484a..13c9ce4437 100644 --- a/src/test/scala/ir/InterpreterTests.scala +++ b/src/test/scala/ir/InterpreterTests.scala @@ -10,7 +10,7 @@ import org.scalatest.funsuite.AnyFunSuite import test_util.{BASILTest, CaptureOutput} import translating.PrettyPrinter.* import util.functional.* -import util.{ILLoadingConfig, IRContext, IRLoading, LogLevel, Logger, PerformanceTimer} +import 
util.{ILLoadingConfig, LogLevel, Logger, PerformanceTimer} import scala.language.implicitConversions diff --git a/src/test/scala/test_util/BASILTest.scala b/src/test/scala/test_util/BASILTest.scala index 7c3e328182..ad9bf8b847 100644 --- a/src/test/scala/test_util/BASILTest.scala +++ b/src/test/scala/test_util/BASILTest.scala @@ -1,5 +1,6 @@ package test_util +import ir.IRContext import org.scalatest.concurrent.ScaledTimeSpans import org.scalatest.time.{Seconds, Span} import util.boogie_interaction.* @@ -9,7 +10,6 @@ import util.{ BoogieGeneratorConfig, DSAConfig, ILLoadingConfig, - IRContext, Logger, RunUtils, StaticAnalysisConfig diff --git a/src/test/scala/test_util/Context.scala b/src/test/scala/test_util/Context.scala index 7d8ce53c16..cb8c089e5b 100644 --- a/src/test/scala/test_util/Context.scala +++ b/src/test/scala/test_util/Context.scala @@ -1,9 +1,8 @@ package test_util import boogie.SpecGlobal -import ir.{Program, cilvisitor, transforms} +import ir.{IRContext, Program, cilvisitor, transforms} import specification.Specification -import util.IRContext def programToContext( program: Program, diff --git a/src/test/scala/test_util/TestValueDomainWithInterpreter.scala b/src/test/scala/test_util/TestValueDomainWithInterpreter.scala index 16e6e390bc..074494b40b 100644 --- a/src/test/scala/test_util/TestValueDomainWithInterpreter.scala +++ b/src/test/scala/test_util/TestValueDomainWithInterpreter.scala @@ -1,9 +1,8 @@ package test_util -import ir.* import ir.eval.* +import ir.{IRContext, *} import translating.PrettyPrinter.* -import util.IRContext import util.functional.State trait TestValueDomainWithInterpreter[T] { From 5ea632d9b9b1d3791819d9c6632166cf51d49f11 Mon Sep 17 00:00:00 2001 From: Alistair Michael Date: Thu, 3 Jul 2025 17:38:07 +1000 Subject: [PATCH 22/30] move simplify to separate file --- src/main/scala/util/RunUtils.scala | 160 ------------------ src/test/scala/ConditionLiftingTests.scala | 2 +- src/test/scala/DifferentialAnalysisTest.scala | 2 +- 3 files changed, 2 insertions(+), 162 deletions(-) diff --git a/src/main/scala/util/RunUtils.scala b/src/main/scala/util/RunUtils.scala index 6e2b73563c..85b4565072 100644 --- a/src/main/scala/util/RunUtils.scala +++ b/src/main/scala/util/RunUtils.scala @@ -57,166 +57,6 @@ object RunUtils { } } - def doSimplify(ctx: IRContext, config: Option[StaticAnalysisConfig]): Unit = { - // writeToFile(dotBlockGraph(program, program.filter(_.isInstanceOf[Block]).map(b => b -> b.toString).toMap), s"blockgraph-before-simp.dot") - Logger.info("[!] Running Simplify") - val timer = PerformanceTimer("Simplify") - val program = ctx.program - - val foundLoops = LoopDetector.identify_loops(program) - val newLoops = foundLoops.reducibleTransformIR() - newLoops.updateIrWithLoops() - - for (p <- program.procedures) { - p.normaliseBlockNames() - } - - ctx.program.sortProceduresRPO() - - transforms.liftSVComp(ctx.program) - - config.foreach { - _.dumpILToPath.foreach { s => - DebugDumpIRLogger.writeToFile(File(s"${s}_il-before-simp.il"), pp_prog(program)) - } - } - - transforms.applyRPO(program) - - // example of printing a simple analysis - - transforms.removeEmptyBlocks(program) - transforms.coalesceBlocks(program) - transforms.removeEmptyBlocks(program) - - // transforms.coalesceBlocksCrossBranchDependency(program) - config.foreach { - _.analysisDotPath.foreach { s => - DebugDumpIRLogger.writeToFile(File(s"${s}_blockgraph-before-dsa.dot"), dotBlockGraph(program.mainProcedure)) - } - } - - Logger.info("[!] 
Simplify :: DynamicSingleAssignment") - config.foreach { - _.dumpILToPath.foreach { s => - DebugDumpIRLogger.writeToFile(File(s"${s}_il-before-dsa.il"), pp_prog(program)) - } - } - - transforms.OnePassDSA().applyTransform(program) - - // fixme: this used to be a plain function but now we have to supply an analysis manager! - transforms.inlinePLTLaunchpad(ctx, AnalysisManager(ctx.program)) - - transforms.removeEmptyBlocks(program) - - config.foreach { - _.analysisDotPath.foreach { s => - AnalysisResultDotLogger.writeToFile( - File(s"${s}_blockgraph-after-dsa.dot"), - dotBlockGraph( - program, - (program.collect { case b: Block => - b -> pp_block(b) - }).toMap - ) - ) - } - } - config.foreach { - _.dumpILToPath.foreach { s => - DebugDumpIRLogger.writeToFile(File(s"${s}_il-after-dsa.il"), pp_prog(program)) - } - } - - if (ir.eval.SimplifyValidation.validate) { - Logger.info("DSA no uninitialised") - assert(invariant.allVariablesAssignedIndex(program)) - // Logger.info("Live vars difftest") - // val tipLiveVars : Map[CFGPosition, Set[Variable]] = analysis.IntraLiveVarsAnalysis(program).analyze() - // assert(program.procedures.forall(transforms.difftestLiveVars(_, tipLiveVars))) - - Logger.info("DSA Check") - val x = program.procedures.forall(transforms.rdDSAProperty) - assert(x) - Logger.info("DSA Check passed") - assert(invariant.singleCallBlockEnd(program)) - assert(invariant.cfgCorrect(program)) - assert(invariant.blocksUniqueToEachProcedure(program)) - } - - config.foreach { - _.dumpILToPath.foreach { s => - DebugDumpIRLogger.writeToFile(File(s"${s}_il-before-copyprop.il"), pp_prog(program)) - } - } - - // brute force run the analysis twice because it cleans up more stuff - // assert(program.procedures.forall(transforms.rdDSAProperty)) - config.foreach { - _.analysisDotPath.foreach { s => - AnalysisResultDotLogger.writeToFile( - File(s"${s}_blockgraph-before-copyprop.dot"), - dotBlockGraph(program.mainProcedure) - ) - } - } - Logger.info("Copyprop Start") - transforms.copyPropParamFixedPoint(program, ctx.globalOffsets) - - transforms.fixupGuards(program) - transforms.removeDuplicateGuard(program) - config.foreach { - _.analysisDotPath.foreach { s => - AnalysisResultDotLogger.writeToFile( - File(s"${s}_blockgraph-after-simp.dot"), - dotBlockGraph(program.mainProcedure) - ) - } - } - - transforms.liftLinuxAssertFail(ctx) - - // assert(program.procedures.forall(transforms.rdDSAProperty)) - - assert(invariant.blockUniqueLabels(program)) - Logger.info(s"CopyProp ${timer.checkPoint("Simplify")} ms ") - - config.foreach { - _.dumpILToPath.foreach { s => - DebugDumpIRLogger.writeToFile(File(s"${s}_il-after-copyprop.il"), pp_prog(program)) - } - } - - // val x = program.procedures.forall(transforms.rdDSAProperty) - // assert(x) - if (ir.eval.SimplifyValidation.validate) { - Logger.info("DSA Check (after transform)") - val x = program.procedures.forall(transforms.rdDSAProperty) - assert(x) - Logger.info("DSA Check succeeded") - } - // run this after cond recovery because sign bit calculations often need high bits - // which go away in high level conss - config.foreach { - _.dumpILToPath.foreach { s => - DebugDumpIRLogger.writeToFile(File(s"${s}_il-after-slices.il"), pp_prog(program)) - } - } - - // re-apply dsa - // transforms.OnePassDSA().applyTransform(program) - - if (ir.eval.SimplifyValidation.validate) { - Logger.info("[!] 
Simplify :: Writing simplification validation") - val w = BufferedWriter(FileWriter("rewrites.smt2")) - ir.eval.SimplifyValidation.makeValidation(w) - w.close() - } - - Logger.info("[!] Simplify :: finished") - } - def loadAndTranslate(conf: BASILConfig, postLoad: IRContext => Unit = s => ()): BASILResult = { Logger.info("[!] Loading Program") val q = conf diff --git a/src/test/scala/ConditionLiftingTests.scala b/src/test/scala/ConditionLiftingTests.scala index 2f4a152fe2..c2c25d2815 100644 --- a/src/test/scala/ConditionLiftingTests.scala +++ b/src/test/scala/ConditionLiftingTests.scala @@ -822,7 +822,7 @@ class ConditionLiftingRegressionTest extends AnyFunSuite with test_util.CaptureO ir.transforms.doCleanupWithSimplify(ctx, AnalysisManager(ctx.program)) ir.transforms.clearParams(ctx.program) ctx = ir.transforms.liftProcedureCallAbstraction(ctx) - util.RunUtils.doSimplify(ctx, None) + ir.transforms.doSimplify(ctx, None) for (p <- ctx.program.procedures) { p.normaliseBlockNames() } diff --git a/src/test/scala/DifferentialAnalysisTest.scala b/src/test/scala/DifferentialAnalysisTest.scala index 052bc23a3f..ac4fd6cdc0 100644 --- a/src/test/scala/DifferentialAnalysisTest.scala +++ b/src/test/scala/DifferentialAnalysisTest.scala @@ -106,7 +106,7 @@ abstract class DifferentialTest extends AnyFunSuite, CaptureOutput, TestCustomis if (simplify) { ictx = ir.transforms.liftProcedureCallAbstraction(ictx) comparectx = ir.transforms.liftProcedureCallAbstraction(comparectx) - RunUtils.doSimplify(ictx, staticAnalysisConfig) + ir.transforms.doSimplify(ictx, staticAnalysisConfig) } diffTest(ictx, comparectx) From a378b284dd7d6d3bf56f56c6ac6f73f2b8ae8260 Mon Sep 17 00:00:00 2001 From: Alistair Michael Date: Thu, 3 Jul 2025 17:51:15 +1000 Subject: [PATCH 23/30] move analysis function to analysis pipeline file --- .../scala/analysis/AnalysePipelineMRA.scala | 105 +++++++++++ .../ir/transforms/SimplifyPipeline.scala | 166 ++++++++++++++++++ src/main/scala/util/RunUtils.scala | 103 +---------- .../scala/DataStructureAnalysisTest.scala | 6 +- src/test/scala/DifferentialAnalysisTest.scala | 6 +- src/test/scala/InterpretTestConstProp.scala | 6 +- src/test/scala/IntervalDSATest.scala | 4 +- src/test/scala/MemoryTransformTests.scala | 4 +- src/test/scala/SVATest.scala | 4 +- 9 files changed, 287 insertions(+), 117 deletions(-) create mode 100644 src/main/scala/ir/transforms/SimplifyPipeline.scala diff --git a/src/main/scala/analysis/AnalysePipelineMRA.scala b/src/main/scala/analysis/AnalysePipelineMRA.scala index 810f7deb80..92c5e2a66d 100644 --- a/src/main/scala/analysis/AnalysePipelineMRA.scala +++ b/src/main/scala/analysis/AnalysePipelineMRA.scala @@ -1,13 +1,16 @@ package analysis +import analysis.data_structure_analysis.{DataStructureAnalysis, SymbolicAddress, SymbolicAddressAnalysis} import analysis.{Interval as _, *} import boogie.* import ir.* import specification.* +import translating.PrettyPrinter.pp_prog import util.{ AnalysisResultDotLogger, DebugDumpIRLogger, Logger, + MemoryRegionsMode, StaticAnalysisConfig, StaticAnalysisLogger, writeToFile @@ -313,4 +316,106 @@ object AnalysisPipelineMRA { } results.mkString(System.lineSeparator()) } + + /** Use static analysis to resolve indirect calls and replace them in the IR until fixed point. + */ + def runToFixpoint(config: StaticAnalysisConfig, ctx: IRContext): StaticAnalysisContext = { + var iteration = 1 + var modified: Boolean = true + val analysisResult = mutable.ArrayBuffer[StaticAnalysisContext]() + while (modified) { + Logger.debug("[!] 
Running Static Analysis") + val result = analysis.AnalysisPipelineMRA.analyse(ctx, config, iteration, analysisResult.lastOption) + val previousResult = analysisResult.lastOption + analysisResult.append(result) + StaticAnalysisLogger.info("[!] Replacing Indirect Calls") + + /* + modified = transforms.SteensgaardIndirectCallResolution( + ctx.program, + result.steensgaardResults, + result.reachingDefs + ).resolveIndirectCalls() + */ + + if ( + config.memoryRegions == MemoryRegionsMode.MRA && (previousResult.isEmpty || result.vsaResult != previousResult.get.vsaResult) + ) { + modified = true + } else { + modified = + transforms.VSAIndirectCallResolution(ctx.program, result.vsaResult, result.mmmResults).resolveIndirectCalls() + } + + if (modified) { + iteration += 1 + StaticAnalysisLogger.info(s"[!] Analysing again (iter $iteration)") + } + } + + // should later move this to be inside while (modified) loop and have splitting threads cause further iterations + + if (config.threadSplit) { + transforms.splitThreads(ctx.program, analysisResult.last.steensgaardResults, analysisResult.last.ssaResults) + } + + val reachingDefs = ReachingDefsAnalysis(ctx.program, analysisResult.last.writesToResult).analyze() + config.analysisDotPath.foreach { s => + AnalysisResultDotLogger.writeToFile(File(s"${s}_ct.dot"), toDot(ctx.program)) + } + + StaticAnalysisLogger.info("[!] Running Symbolic Access Analysis") + val symResults: Map[CFGPosition, Map[SymbolicAddress, TwoElement]] = + SymbolicAddressAnalysis(ctx.program, analysisResult.last.interProcConstProp).analyze() + config.analysisDotPath.foreach { s => + val labels = symResults.map { (k, v) => k -> v.toString } + AnalysisResultDotLogger.writeToFile(File(s"${s}_saa.dot"), toDot(ctx.program, labels)) + } + + StaticAnalysisLogger.info("[!] Running DSA Analysis") + + writeToFile(pp_prog(ctx.program), "testo1.il") + val symbolTableEntries: Set[SymbolTableEntry] = ctx.globals ++ ctx.funcEntries + val dsa = DataStructureAnalysis( + ctx.program, + symResults, + analysisResult.last.interProcConstProp, + symbolTableEntries, + ctx.globalOffsets, + ctx.externalFunctions, + reachingDefs, + analysisResult.last.writesToResult, + analysisResult.last.paramResults + ) + dsa.analyze() + + config.analysisDotPath.foreach { s => + dsa.topDown(ctx.program.mainProcedure).toDot + DebugDumpIRLogger.writeToFile(File(s"${s}_main_dsg.dot"), dsa.topDown(ctx.program.mainProcedure).toDot) + } + + Logger.debug("[!] Injecting regions") + val regionInjector = if (config.memoryRegions == MemoryRegionsMode.MRA) { + val injector = RegionInjectorMRA(ctx.program, analysisResult.last.mmmResults) + injector.injectRegions() + Some(injector) + } else if (config.memoryRegions == MemoryRegionsMode.DSA) { + val injector = RegionInjectorDSA(ctx.program, dsa.topDown) + injector.injectRegions() + Some(injector) + } else { + None + } + + assert(invariant.singleCallBlockEnd(ctx.program)) + StaticAnalysisLogger.info(s"[!] 
Finished indirect call resolution after $iteration iterations") + analysisResult.last.copy( + symbolicAddresses = symResults, + localDSA = dsa.local.toMap, + bottomUpDSA = dsa.bottomUp.toMap, + topDownDSA = dsa.topDown.toMap, + regionInjector = regionInjector + ) + } + } diff --git a/src/main/scala/ir/transforms/SimplifyPipeline.scala b/src/main/scala/ir/transforms/SimplifyPipeline.scala new file mode 100644 index 0000000000..0a23d884b9 --- /dev/null +++ b/src/main/scala/ir/transforms/SimplifyPipeline.scala @@ -0,0 +1,166 @@ +package ir.transforms + +import analysis.{AnalysisManager, Interval as _, *} +import ir.* +import translating.* +import translating.PrettyPrinter.* +import util.{AnalysisResultDotLogger, DebugDumpIRLogger, Logger, PerformanceTimer, StaticAnalysisConfig} + +import java.io.{BufferedWriter, File, FileWriter} + +def doSimplify(ctx: IRContext, config: Option[StaticAnalysisConfig]): Unit = { + // writeToFile(dotBlockGraph(program, program.filter(_.isInstanceOf[Block]).map(b => b -> b.toString).toMap), s"blockgraph-before-simp.dot") + Logger.info("[!] Running Simplify") + val timer = PerformanceTimer("Simplify") + val program = ctx.program + + val foundLoops = LoopDetector.identify_loops(program) + val newLoops = foundLoops.reducibleTransformIR() + newLoops.updateIrWithLoops() + + for (p <- program.procedures) { + p.normaliseBlockNames() + } + + ctx.program.sortProceduresRPO() + + transforms.liftSVComp(ctx.program) + + config.foreach { + _.dumpILToPath.foreach { s => + DebugDumpIRLogger.writeToFile(File(s"${s}_il-before-simp.il"), pp_prog(program)) + } + } + + transforms.applyRPO(program) + + // example of printing a simple analysis + + transforms.removeEmptyBlocks(program) + transforms.coalesceBlocks(program) + transforms.removeEmptyBlocks(program) + + // transforms.coalesceBlocksCrossBranchDependency(program) + config.foreach { + _.analysisDotPath.foreach { s => + DebugDumpIRLogger.writeToFile(File(s"${s}_blockgraph-before-dsa.dot"), dotBlockGraph(program.mainProcedure)) + } + } + + Logger.info("[!] Simplify :: DynamicSingleAssignment") + config.foreach { + _.dumpILToPath.foreach { s => + DebugDumpIRLogger.writeToFile(File(s"${s}_il-before-dsa.il"), pp_prog(program)) + } + } + + transforms.OnePassDSA().applyTransform(program) + + // fixme: this used to be a plain function but now we have to supply an analysis manager! 
+ transforms.inlinePLTLaunchpad(ctx, AnalysisManager(ctx.program)) + + transforms.removeEmptyBlocks(program) + + config.foreach { + _.analysisDotPath.foreach { s => + AnalysisResultDotLogger.writeToFile( + File(s"${s}_blockgraph-after-dsa.dot"), + dotBlockGraph( + program, + (program.collect { case b: Block => + b -> pp_block(b) + }).toMap + ) + ) + } + } + config.foreach { + _.dumpILToPath.foreach { s => + DebugDumpIRLogger.writeToFile(File(s"${s}_il-after-dsa.il"), pp_prog(program)) + } + } + + if (ir.eval.SimplifyValidation.validate) { + Logger.info("DSA no uninitialised") + assert(invariant.allVariablesAssignedIndex(program)) + // Logger.info("Live vars difftest") + // val tipLiveVars : Map[CFGPosition, Set[Variable]] = analysis.IntraLiveVarsAnalysis(program).analyze() + // assert(program.procedures.forall(transforms.difftestLiveVars(_, tipLiveVars))) + + Logger.info("DSA Check") + val x = program.procedures.forall(transforms.rdDSAProperty) + assert(x) + Logger.info("DSA Check passed") + assert(invariant.singleCallBlockEnd(program)) + assert(invariant.cfgCorrect(program)) + assert(invariant.blocksUniqueToEachProcedure(program)) + } + + config.foreach { + _.dumpILToPath.foreach { s => + DebugDumpIRLogger.writeToFile(File(s"${s}_il-before-copyprop.il"), pp_prog(program)) + } + } + + // brute force run the analysis twice because it cleans up more stuff + // assert(program.procedures.forall(transforms.rdDSAProperty)) + config.foreach { + _.analysisDotPath.foreach { s => + AnalysisResultDotLogger.writeToFile( + File(s"${s}_blockgraph-before-copyprop.dot"), + dotBlockGraph(program.mainProcedure) + ) + } + } + Logger.info("Copyprop Start") + transforms.copyPropParamFixedPoint(program, ctx.globalOffsets) + + transforms.fixupGuards(program) + transforms.removeDuplicateGuard(program) + config.foreach { + _.analysisDotPath.foreach { s => + AnalysisResultDotLogger.writeToFile(File(s"${s}_blockgraph-after-simp.dot"), dotBlockGraph(program.mainProcedure)) + } + } + + transforms.liftLinuxAssertFail(ctx) + + // assert(program.procedures.forall(transforms.rdDSAProperty)) + + assert(invariant.blockUniqueLabels(program)) + Logger.info(s"CopyProp ${timer.checkPoint("Simplify")} ms ") + + config.foreach { + _.dumpILToPath.foreach { s => + DebugDumpIRLogger.writeToFile(File(s"${s}_il-after-copyprop.il"), pp_prog(program)) + } + } + + // val x = program.procedures.forall(transforms.rdDSAProperty) + // assert(x) + if (ir.eval.SimplifyValidation.validate) { + Logger.info("DSA Check (after transform)") + val x = program.procedures.forall(transforms.rdDSAProperty) + assert(x) + Logger.info("DSA Check succeeded") + } + // run this after cond recovery because sign bit calculations often need high bits + // which go away in high level conds + config.foreach { + _.dumpILToPath.foreach { s => + DebugDumpIRLogger.writeToFile(File(s"${s}_il-after-slices.il"), pp_prog(program)) + } + } + + // re-apply dsa + // transforms.OnePassDSA().applyTransform(program) + + if (ir.eval.SimplifyValidation.validate) { + Logger.info("[!] Simplify :: Writing simplification validation") + val w = BufferedWriter(FileWriter("rewrites.smt2")) + ir.eval.SimplifyValidation.makeValidation(w) + w.close() + } + + Logger.info("[!] 
Simplify :: finished") +} diff --git a/src/main/scala/util/RunUtils.scala b/src/main/scala/util/RunUtils.scala index 85b4565072..309d3a881b 100644 --- a/src/main/scala/util/RunUtils.scala +++ b/src/main/scala/util/RunUtils.scala @@ -7,7 +7,6 @@ import ir.* import ir.dsl.given import ir.eval.* import ir.transforms.* -import specification.* import translating.* import translating.PrettyPrinter.* import util.DSAConfig.Prereq @@ -112,7 +111,7 @@ object RunUtils { q.loading.dumpIL.foreach(s => DebugDumpIRLogger.writeToFile(File(s"$s-before-analysis.il"), pp_prog(ctx.program))) val analysis = q.staticAnalysis.map { conf => - staticAnalysis(conf, ctx) + AnalysisPipelineMRA.runToFixpoint(conf, ctx) } q.loading.dumpIL.foreach(s => DebugDumpIRLogger.writeToFile(File(s"$s-after-analysis.il"), pp_prog(ctx.program))) @@ -230,106 +229,6 @@ object RunUtils { BASILResult(ctx, analysis, dsaContext, boogiePrograms) } - /** Use static analysis to resolve indirect calls and replace them in the IR until fixed point. - */ - def staticAnalysis(config: StaticAnalysisConfig, ctx: IRContext): StaticAnalysisContext = { - var iteration = 1 - var modified: Boolean = true - val analysisResult = mutable.ArrayBuffer[StaticAnalysisContext]() - while (modified) { - Logger.debug("[!] Running Static Analysis") - val result = analysis.AnalysisPipelineMRA.analyse(ctx, config, iteration, analysisResult.lastOption) - val previousResult = analysisResult.lastOption - analysisResult.append(result) - StaticAnalysisLogger.info("[!] Replacing Indirect Calls") - - /* - modified = transforms.SteensgaardIndirectCallResolution( - ctx.program, - result.steensgaardResults, - result.reachingDefs - ).resolveIndirectCalls() - */ - - if ( - config.memoryRegions == MemoryRegionsMode.MRA && (previousResult.isEmpty || result.vsaResult != previousResult.get.vsaResult) - ) { - modified = true - } else { - modified = - transforms.VSAIndirectCallResolution(ctx.program, result.vsaResult, result.mmmResults).resolveIndirectCalls() - } - - if (modified) { - iteration += 1 - StaticAnalysisLogger.info(s"[!] Analysing again (iter $iteration)") - } - } - - // should later move this to be inside while (modified) loop and have splitting threads cause further iterations - - if (config.threadSplit) { - transforms.splitThreads(ctx.program, analysisResult.last.steensgaardResults, analysisResult.last.ssaResults) - } - - val reachingDefs = ReachingDefsAnalysis(ctx.program, analysisResult.last.writesToResult).analyze() - config.analysisDotPath.foreach { s => - AnalysisResultDotLogger.writeToFile(File(s"${s}_ct.dot"), toDot(ctx.program)) - } - - StaticAnalysisLogger.info("[!] Running Symbolic Access Analysis") - val symResults: Map[CFGPosition, Map[SymbolicAddress, TwoElement]] = - SymbolicAddressAnalysis(ctx.program, analysisResult.last.interProcConstProp).analyze() - config.analysisDotPath.foreach { s => - val labels = symResults.map { (k, v) => k -> v.toString } - AnalysisResultDotLogger.writeToFile(File(s"${s}_saa.dot"), toDot(ctx.program, labels)) - } - - StaticAnalysisLogger.info("[!] 
Running DSA Analysis") - - writeToFile(pp_prog(ctx.program), "testo1.il") - val symbolTableEntries: Set[SymbolTableEntry] = ctx.globals ++ ctx.funcEntries - val dsa = DataStructureAnalysis( - ctx.program, - symResults, - analysisResult.last.interProcConstProp, - symbolTableEntries, - ctx.globalOffsets, - ctx.externalFunctions, - reachingDefs, - analysisResult.last.writesToResult, - analysisResult.last.paramResults - ) - dsa.analyze() - - config.analysisDotPath.foreach { s => - dsa.topDown(ctx.program.mainProcedure).toDot - DebugDumpIRLogger.writeToFile(File(s"${s}_main_dsg.dot"), dsa.topDown(ctx.program.mainProcedure).toDot) - } - - Logger.debug("[!] Injecting regions") - val regionInjector = if (config.memoryRegions == MemoryRegionsMode.MRA) { - val injector = RegionInjectorMRA(ctx.program, analysisResult.last.mmmResults) - injector.injectRegions() - Some(injector) - } else if (config.memoryRegions == MemoryRegionsMode.DSA) { - val injector = RegionInjectorDSA(ctx.program, dsa.topDown) - injector.injectRegions() - Some(injector) - } else { - None - } - - assert(invariant.singleCallBlockEnd(ctx.program)) - StaticAnalysisLogger.info(s"[!] Finished indirect call resolution after $iteration iterations") - analysisResult.last.copy( - symbolicAddresses = symResults, - localDSA = dsa.local.toMap, - bottomUpDSA = dsa.bottomUp.toMap, - topDownDSA = dsa.topDown.toMap, - regionInjector = regionInjector - ) - } } def readFromFile(fileName: String): Iterable[String] = { diff --git a/src/test/scala/DataStructureAnalysisTest.scala b/src/test/scala/DataStructureAnalysisTest.scala index d2a08d7462..2562e968f9 100644 --- a/src/test/scala/DataStructureAnalysisTest.scala +++ b/src/test/scala/DataStructureAnalysisTest.scala @@ -1,5 +1,5 @@ -import analysis.StaticAnalysisContext import analysis.data_structure_analysis.* +import analysis.{AnalysisPipelineMRA, StaticAnalysisContext} import boogie.SpecGlobal import ir.* import ir.dsl.* @@ -27,7 +27,7 @@ class DataStructureAnalysisTest extends AnyFunSuite with CaptureOutput { val emptySpec = Specification(Set(), Set(), Map(), List(), List(), List(), Set()) val emptyContext = IRContext(List(), Set(), Set(), Set(), Map(), emptySpec, program) - RunUtils.staticAnalysis(StaticAnalysisConfig(), emptyContext) + AnalysisPipelineMRA.runToFixpoint(StaticAnalysisConfig(), emptyContext) } def runTest(relativePath: String): BASILResult = { @@ -754,7 +754,7 @@ class DataStructureAnalysisTest extends AnyFunSuite with CaptureOutput { val spec = Specification(Set(), globals, Map(), List(), List(), List(), Set()) val context = IRContext(List(), Set(), globals, Set(), globalOffsets, spec, program) - val staticAnalysisResult = RunUtils.staticAnalysis(StaticAnalysisConfig(), context) + val staticAnalysisResult = AnalysisPipelineMRA.runToFixpoint(StaticAnalysisConfig(), context) val dsg = staticAnalysisResult.topDownDSA(program.mainProcedure) diff --git a/src/test/scala/DifferentialAnalysisTest.scala b/src/test/scala/DifferentialAnalysisTest.scala index ac4fd6cdc0..10822d8c35 100644 --- a/src/test/scala/DifferentialAnalysisTest.scala +++ b/src/test/scala/DifferentialAnalysisTest.scala @@ -1,10 +1,10 @@ -import analysis.AnalysisManager +import analysis.{AnalysisManager, AnalysisPipelineMRA} import ir.eval.{ExecEffect, *} import ir.{IRContext, IRLoading, *} import org.scalatest.* import org.scalatest.funsuite.* import test_util.* -import util.{ILLoadingConfig, LogLevel, Logger, RunUtils, StaticAnalysisConfig} +import util.{ILLoadingConfig, LogLevel, Logger, StaticAnalysisConfig} import 
java.io.File @@ -100,7 +100,7 @@ abstract class DifferentialTest extends AnyFunSuite, CaptureOutput, TestCustomis ir.transforms.clearParams(comparectx.program) for (analysis <- staticAnalysisConfig) { - RunUtils.staticAnalysis(analysis, comparectx) + AnalysisPipelineMRA.runToFixpoint(analysis, comparectx) } if (simplify) { diff --git a/src/test/scala/InterpretTestConstProp.scala b/src/test/scala/InterpretTestConstProp.scala index cf738036c0..683c2eb536 100644 --- a/src/test/scala/InterpretTestConstProp.scala +++ b/src/test/scala/InterpretTestConstProp.scala @@ -1,9 +1,9 @@ -import analysis.* +import analysis.{AnalysisPipelineMRA, *} import ir.{IRLoading, *} import org.scalatest.* import org.scalatest.funsuite.* import test_util.{BASILTest, CaptureOutput, TestValueDomainWithInterpreter} -import util.{ILLoadingConfig, LogLevel, Logger, RunUtils, StaticAnalysisConfig} +import util.{ILLoadingConfig, LogLevel, Logger, StaticAnalysisConfig} @test_util.tags.StandardSystemTest class InterpretTestConstProp @@ -29,7 +29,7 @@ class InterpretTestConstProp val ictx = IRLoading.load(loading) ir.transforms.doCleanupWithoutSimplify(ictx, AnalysisManager(ictx.program)) ir.transforms.clearParams(ictx.program) - val analyses = RunUtils.staticAnalysis(StaticAnalysisConfig(None, None, None), ictx) + val analyses = AnalysisPipelineMRA.runToFixpoint(StaticAnalysisConfig(None, None, None), ictx) val analysisres = analyses.intraProcConstProp.collect { case (block: Block, v) => block -> v diff --git a/src/test/scala/IntervalDSATest.scala b/src/test/scala/IntervalDSATest.scala index 5300c5ad45..a9b9b46d56 100644 --- a/src/test/scala/IntervalDSATest.scala +++ b/src/test/scala/IntervalDSATest.scala @@ -1,5 +1,5 @@ import analysis.data_structure_analysis.{DSInterval, Global, Heap, IntervalDSA, Par, Ret, Stack, SymBase} -import analysis.{StaticAnalysisContext, data_structure_analysis} +import analysis.{AnalysisPipelineMRA, StaticAnalysisContext, data_structure_analysis} import boogie.SpecGlobal import ir.Endian.LittleEndian import ir.dsl.{block, directCall, goto, proc, prog, ret} @@ -20,7 +20,7 @@ class IntervalDSATest extends AnyFunSuite with CaptureOutput { val emptySpec = Specification(Set(), Set(), Map(), List(), List(), List(), Set()) val emptyContext = IRContext(List(), Set(), Set(), Set(), Map(), emptySpec, program) - RunUtils.staticAnalysis(StaticAnalysisConfig(), emptyContext) + AnalysisPipelineMRA.runToFixpoint(StaticAnalysisConfig(), emptyContext) } def runTest(relativePath: String, config: DSAConfig = Checks): BASILResult = { diff --git a/src/test/scala/MemoryTransformTests.scala b/src/test/scala/MemoryTransformTests.scala index 3203d7f09a..bd7f367f17 100644 --- a/src/test/scala/MemoryTransformTests.scala +++ b/src/test/scala/MemoryTransformTests.scala @@ -1,4 +1,4 @@ -import analysis.StaticAnalysisContext +import analysis.{AnalysisPipelineMRA, StaticAnalysisContext} import boogie.SpecGlobal import ir.* import ir.Endian.LittleEndian @@ -18,7 +18,7 @@ class MemoryTransformTests extends AnyFunSuite with CaptureOutput { val emptySpec = Specification(Set(), Set(), Map(), List(), List(), List(), Set()) val emptyContext = IRContext(List(), Set(), Set(), Set(), Map(), emptySpec, program) - RunUtils.staticAnalysis(StaticAnalysisConfig(), emptyContext) + AnalysisPipelineMRA.runToFixpoint(StaticAnalysisConfig(), emptyContext) } def runTest(relativePath: String): BASILResult = { diff --git a/src/test/scala/SVATest.scala b/src/test/scala/SVATest.scala index 85933e4dc6..e043dc45b2 100644 --- 
a/src/test/scala/SVATest.scala +++ b/src/test/scala/SVATest.scala @@ -1,6 +1,6 @@ -import analysis.StaticAnalysisContext import analysis.data_structure_analysis.* import analysis.data_structure_analysis.given +import analysis.{AnalysisPipelineMRA, StaticAnalysisContext} import boogie.SpecGlobal import ir.* import ir.dsl.* @@ -19,7 +19,7 @@ class SVATest extends AnyFunSuite with CaptureOutput { val emptySpec = Specification(Set(), Set(), Map(), List(), List(), List(), Set()) val emptyContext = IRContext(List(), Set(), Set(), Set(), Map(), emptySpec, program) - RunUtils.staticAnalysis(StaticAnalysisConfig(), emptyContext) + AnalysisPipelineMRA.runToFixpoint(StaticAnalysisConfig(), emptyContext) } def runTest(context: IRContext): BASILResult = { From 6192865bc8467af0a9eff1e4c2a3f3c22f312c10 Mon Sep 17 00:00:00 2001 From: James Tobler <64625414+j-tobler@users.noreply.github.com> Date: Fri, 15 Aug 2025 08:50:14 +1000 Subject: [PATCH 24/30] scalafmt --- basilmill/platform.mill | 1 - 1 file changed, 1 deletion(-) diff --git a/basilmill/platform.mill b/basilmill/platform.mill index 3ea6fc7677..e33d6a3382 100644 --- a/basilmill/platform.mill +++ b/basilmill/platform.mill @@ -26,7 +26,6 @@ object Platform { case "amd64" => Right(Arch.X86_64) case "x86_64" => Right(Arch.X86_64) case "aarch64" => Right(Arch.Aarch64) - case "x86_64" => Right(Arch.X86_64) case x => Left("unknown arch: " + x) } From e3d0ca9f815df13ec9a9393e8cc59193225e390b Mon Sep 17 00:00:00 2001 From: James Tobler <64625414+j-tobler@users.noreply.github.com> Date: Fri, 15 Aug 2025 08:58:20 +1000 Subject: [PATCH 25/30] ran ./mill fmt --- src/main/scala/Main.scala | 2 +- .../scala/analysis/data_structure_analysis/IntervalDSA.scala | 2 +- .../data_structure_analysis/SymbolicValueAnalysis.scala | 2 +- src/main/scala/util/BASILConfig.scala | 3 +-- src/test/scala/IntervalDSATest.scala | 2 +- src/test/scala/IrreducibleLoop.scala | 1 + 6 files changed, 6 insertions(+), 6 deletions(-) diff --git a/src/main/scala/Main.scala b/src/main/scala/Main.scala index 98e54174c0..ad1030635b 100644 --- a/src/main/scala/Main.scala +++ b/src/main/scala/Main.scala @@ -1,8 +1,8 @@ // package scala import gtirb.GTIRBReadELF -import mainargs.{Flag, ParserForClass, arg, main} import ir.{FrontendMode, IRLoading} +import mainargs.{Flag, ParserForClass, arg, main} import util.boogie_interaction.BoogieResultKind import util.{ AnalysisResultDotLogger, diff --git a/src/main/scala/analysis/data_structure_analysis/IntervalDSA.scala b/src/main/scala/analysis/data_structure_analysis/IntervalDSA.scala index 24ef2d6dfa..e47d30377a 100644 --- a/src/main/scala/analysis/data_structure_analysis/IntervalDSA.scala +++ b/src/main/scala/analysis/data_structure_analysis/IntervalDSA.scala @@ -11,7 +11,7 @@ import specification.{ExternalFunction, FuncEntry, SymbolTableEntry} import util.DSAPhase.* import util.LogLevel.INFO import util.assertion.* -import util.{DSConfig, DSALogger, DSAPhase, PerformanceTimer} +import util.{DSALogger, DSAPhase, DSConfig, PerformanceTimer} import scala.collection.mutable import scala.collection.mutable.ArrayBuffer diff --git a/src/main/scala/analysis/data_structure_analysis/SymbolicValueAnalysis.scala b/src/main/scala/analysis/data_structure_analysis/SymbolicValueAnalysis.scala index 0bb8d84b10..cd952ae8c2 100644 --- a/src/main/scala/analysis/data_structure_analysis/SymbolicValueAnalysis.scala +++ b/src/main/scala/analysis/data_structure_analysis/SymbolicValueAnalysis.scala @@ -5,8 +5,8 @@ import ir.* import ir.cilvisitor.{CILVisitor, DoChildren, 
SkipChildren, visit_expr} import ir.eval.BitVectorEval.bv2SignedInt import ir.transforms.{AbstractDomain, worklistSolver} +import util.SVALogger as Logger import util.assertion.* -import util.{SVALogger as Logger} import scala.annotation.tailrec import scala.collection.{SortedMap, mutable} diff --git a/src/main/scala/util/BASILConfig.scala b/src/main/scala/util/BASILConfig.scala index c97ca3c0ce..49c7d0d8d5 100644 --- a/src/main/scala/util/BASILConfig.scala +++ b/src/main/scala/util/BASILConfig.scala @@ -1,7 +1,6 @@ package util -import ir.IRContext -import ir.FrontendMode +import ir.{FrontendMode, IRContext} enum ProcRelyVersion { case Function, IfCommandContradiction diff --git a/src/test/scala/IntervalDSATest.scala b/src/test/scala/IntervalDSATest.scala index 8767b4432c..185ca002e2 100644 --- a/src/test/scala/IntervalDSATest.scala +++ b/src/test/scala/IntervalDSATest.scala @@ -1,4 +1,4 @@ -import analysis.data_structure_analysis.{DSInterval, Heap, IntervalDSA, Par, Ret, Stack, SymBase} +import analysis.data_structure_analysis.{DSInterval, Heap, IntervalDSA, Par, Ret, SymBase} import analysis.{AnalysisPipelineMRA, StaticAnalysisContext, data_structure_analysis} import boogie.SpecGlobal import ir.Endian.LittleEndian diff --git a/src/test/scala/IrreducibleLoop.scala b/src/test/scala/IrreducibleLoop.scala index ce5500df1e..f0ef333c71 100644 --- a/src/test/scala/IrreducibleLoop.scala +++ b/src/test/scala/IrreducibleLoop.scala @@ -4,6 +4,7 @@ import org.scalatest.funsuite.AnyFunSuite import test_util.{BASILTest, CaptureOutput} import translating.{BAPToIR, ReadELFData} import util.{ILLoadingConfig, LogLevel, Logger} + import scala.sys.process.* @test_util.tags.UnitTest From 16e590aba048a8e61fd706818dcf8a87556b0432 Mon Sep 17 00:00:00 2001 From: James Tobler <64625414+j-tobler@users.noreply.github.com> Date: Fri, 12 Sep 2025 14:36:48 +1000 Subject: [PATCH 26/30] remove commented out code --- src/main/scala/ir/transforms/Simp.scala | 249 +----------------------- 1 file changed, 2 insertions(+), 247 deletions(-) diff --git a/src/main/scala/ir/transforms/Simp.scala b/src/main/scala/ir/transforms/Simp.scala index 44f286d24e..87bfd700e2 100644 --- a/src/main/scala/ir/transforms/Simp.scala +++ b/src/main/scala/ir/transforms/Simp.scala @@ -1,14 +1,13 @@ package ir.transforms -import analysis.LoopDetector import ir.* import ir.cilvisitor.* import ir.eval.{AlgebraicSimplifications, AssumeConditionSimplifications, simplifyExprFixpoint} import translating.PrettyPrinter.* import util.assertion.* -import util.{Logger, SimplifyLogger, condPropDebugLogger} +import util.{SimplifyLogger, condPropDebugLogger} -import java.io.{BufferedWriter, FileWriter} +import java.io.BufferedWriter import scala.collection.mutable import scala.util.boundary @@ -2293,247 +2292,3 @@ val makeProcEntriesNonLoops = Transform( man.ClobberAll } ) - -// --- DoSimplify ------------------------------------------------------------------------------------------------------ - -// the following code is a work in progress - -val reduceLoops = Transform( - "ReduceLoops", - (ctx, man) => { - val foundLoops = LoopDetector.identify_loops(ctx.program) - val newLoops = foundLoops.reducibleTransformIR() - newLoops.updateIrWithLoops() - man.ClobberAll - } -) - -val normaliseBlockNamesTransform = Transform( - "NormaliseBlockNames", - (ctx, man) => { - for (p <- ctx.program.procedures) { - p.normaliseBlockNames() - } - man.ClobberAll - } -) - -val sortProceduresRpoTransform = Transform( - "NormaliseBlockNames", - (ctx, man) => { - 
ctx.program.sortProceduresRPO() - man.ClobberAll - } -) - -val liftSvCompTransform = Transform( - "LiftSvComp", - (ctx, man) => { - liftSVComp(ctx.program) - man.ClobberAll - } -) - -val removeEmptyBlocksTransform = Transform( - "RemoveEmptyBlocks", - (ctx, man) => { - removeEmptyBlocks(ctx.program) - man.ClobberAll - } -) - -val onePassDsaTransform = Transform( - "OnePassDsa", - (ctx, man) => { - OnePassDSA().applyTransform(ctx.program) - man.ClobberAll - } -) - -// fixme: this is not really a transform, but a check on the ir -val dsaCheck = Transform( - "DsaCheck", - (ctx, man) => { - Logger.info("DSA no uninitialised") - assert(invariant.allVariablesAssignedIndex(ctx.program)) - // Logger.info("Live vars difftest") - // val tipLiveVars : Map[CFGPosition, Set[Variable]] = analysis.IntraLiveVarsAnalysis(ctx.program).analyze() - // assert(ctx.program.procedures.forall(transforms.difftestLiveVars(_, tipLiveVars))) - - Logger.info("DSA Check") - val x = ctx.program.procedures.forall(rdDSAProperty) - assert(x) - Logger.info("DSA Check passed") - assert(invariant.singleCallBlockEnd(ctx.program)) - assert(invariant.cfgCorrect(ctx.program)) - assert(invariant.blocksUniqueToEachProcedure(ctx.program)) - man.PreserveAll - } -) - -// fixme: similar issues to the above transform -val dsaCheckAfterTransform = Transform( - "DsaCheckAfterTransform", - (ctx, man) => { - Logger.info("DSA Check (after transform)") - val x = ctx.program.procedures.forall(rdDSAProperty) - assert(x) - Logger.info("DSA Check succeeded") - man.PreserveAll - } -) - -// fixme: similar issues to the above -// we might want to move this out of the transform, to the callsite -val logSimplificationValidation = Transform( - "LogSimplificationValidation", - (ctx, man) => { - Logger.info("[!] Simplify :: Writing simplification validation") - val w = BufferedWriter(FileWriter("rewrites.smt2")) - ir.eval.SimplifyValidation.makeValidation(w) - w.close() - man.PreserveAll - } -) - -val copyPropParamFixedPointTransform = Transform( - "CopyPropParamFixedPoint", - (ctx, man) => { - copyPropParamFixedPoint(ctx.program, ctx.globalOffsets) - man.ClobberAll - }, - notice = "Copyprop Start" -) - -val fixupGuardsTransform = Transform( - "FixUpGuards", - (ctx, man) => { - fixupGuards(ctx.program) - man.ClobberAll - } -) - -val removeDuplicateGuardsTransform = Transform( - "RemoveDuplicateGuards", - (ctx, man) => { - removeDuplicateGuard(ctx.program) - man.ClobberAll - } -) - -val liftLinuxAssertFailTransform = Transform( - "LiftLinuxAssertFail", - (ctx, man) => { - liftLinuxAssertFail(ctx) - man.ClobberAll - } -) - -def getDoSimplifyTransform(validate: Boolean) = TransformBatch( - "DoSimplify", - List( - reduceLoops, - normaliseBlockNamesTransform, - sortProceduresRpoTransform, - liftSvCompTransform, - /* - config.foreach { - _.dumpILToPath.foreach { s => - DebugDumpIRLogger.writeToFile(File(s"${s}_il-before-simp.il"), pp_prog(program)) - } - } - */ - applyRpoTransform, // (this transform was already defined in this file) - // example of printing a simple analysis - removeEmptyBlocksTransform, - coalesceBlocksOnce, - removeEmptyBlocksTransform, - // transforms.coalesceBlocksCrossBranchDependency(program) - /* - config.foreach { - _.analysisDotPath.foreach { s => - DebugDumpIRLogger.writeToFile(File(s"${s}_blockgraph-before-dsa.dot"), dotBlockGraph(program.mainProcedure)) - } - } - Logger.info("[!] 
Simplify :: DynamicSingleAssignment") - config.foreach { - _.dumpILToPath.foreach { s => - DebugDumpIRLogger.writeToFile(File(s"${s}_il-before-dsa.il"), pp_prog(program)) - } - } - */ - onePassDsaTransform, - inlinePLTLaunchpad, - removeEmptyBlocksTransform, - /* - config.foreach { - _.analysisDotPath.foreach { s => - AnalysisResultDotLogger.writeToFile( - File(s"${s}_blockgraph-after-dsa.dot"), - dotBlockGraph( - program, - (program.collect { case b: Block => - b -> pp_block(b) - }).toMap - ) - ) - } - } - config.foreach { - _.dumpILToPath.foreach { s => - DebugDumpIRLogger.writeToFile(File(s"${s}_il-after-dsa.il"), pp_prog(program)) - } - } - */ - // todo: only run iff ir.eval.SimplifyValidation.validate (that is: iff conf.validateSimp) - dsaCheck, - // todo: - // - if config is set and config.dumpILToPath is set then dump il - // - if config is set and config.analysisDotPath is set then dump blockgraph - // - always log performance for this transform - copyPropParamFixedPointTransform, - fixupGuardsTransform, - removeDuplicateGuardsTransform, - /* - config.foreach { - _.analysisDotPath.foreach { s => - AnalysisResultDotLogger.writeToFile( - File(s"${s}_blockgraph-after-simp.dot"), - dotBlockGraph(program.mainProcedure) - ) - } - } - */ - liftLinuxAssertFailTransform, - // assert(program.procedures.forall(transforms.rdDSAProperty)) - // todo: transforms should have the ability to log their performance as soon as they finish - /* - assert(invariant.blockUniqueLabels(program)) - Logger.info(s"CopyProp ${timer.checkPoint("Simplify")} ms ") - - config.foreach { - _.dumpILToPath.foreach { s => - DebugDumpIRLogger.writeToFile(File(s"${s}_il-after-copyprop.il"), pp_prog(program)) - } - } - */ - // val x = program.procedures.forall(transforms.rdDSAProperty) - // assert(x) - dsaCheckAfterTransform, // todo: only run iff conf.validateSimp - // run this after cond recovery because sign bit calculations often need high bits - // which go away in high level conss - /* - config.foreach { - _.dumpILToPath.foreach { s => - DebugDumpIRLogger.writeToFile(File(s"${s}_il-after-slices.il"), pp_prog(program)) - } - } - */ - // re-apply dsa - // transforms.OnePassDSA().applyTransform(program) - logSimplificationValidation // todo: only run iff conf.validateSimp - ), - notice = "Running Simplify", - // fixme: not an appropriate use of this field - postRunChecks = _ => Logger.info("[!] 
Simplify :: finished") -) From d9f90231fd4adbcdc826129f1929679c95b8d782 Mon Sep 17 00:00:00 2001 From: James Tobler <64625414+j-tobler@users.noreply.github.com> Date: Fri, 12 Sep 2025 14:51:43 +1000 Subject: [PATCH 27/30] clean up transformer instantiation in runutils --- src/main/scala/util/RunUtils.scala | 13 ++++--------- 1 file changed, 4 insertions(+), 9 deletions(-) diff --git a/src/main/scala/util/RunUtils.scala b/src/main/scala/util/RunUtils.scala index 3fd75cf37a..52b3cfe760 100644 --- a/src/main/scala/util/RunUtils.scala +++ b/src/main/scala/util/RunUtils.scala @@ -62,11 +62,6 @@ object RunUtils { assert(invariant.blocksUniqueToEachProcedure(ctx.program)) val analysisManager = AnalysisManager(ctx.program) - // these transforms depend on basil config parameters and thus need to be constructed here - val prepareForTranslation = getPrepareForTranslationTransform(q, Set("free")) - val genProcSummaries = getGenerateProcedureSummariesTransform(q.loading.parameterForm || q.simplify) - val genRgConditions = getGenerateRgConditionsTransform(ctx.program.procedures.toList.filter(_.returnBlock != None)) - val stripUnreachableFunctions = getStripUnreachableFunctionsTransform(q.loading.procedureTrimDepth) if conf.simplify then doCleanupWithSimplify(ctx, analysisManager) else doCleanupWithoutSimplify(ctx, analysisManager) @@ -77,7 +72,7 @@ object RunUtils { assert(ir.invariant.programDiamondForm(ctx.program)) - if q.loading.trimEarly then stripUnreachableFunctions(ctx, analysisManager) + if q.loading.trimEarly then getStripUnreachableFunctionsTransform(q.loading.procedureTrimDepth)(ctx, analysisManager) // todo: since refactoring, there is some extra code that is run here // see StripUnreachableFunctions.getStripUnreachableFunctionsTransform @@ -153,7 +148,7 @@ object RunUtils { memTransferTimer.checkPoint("Performed Memory Transform") } - if q.summariseProcedures then genProcSummaries(ctx, analysisManager) + if q.summariseProcedures then getGenerateProcedureSummariesTransform(q.loading.parameterForm || q.simplify)(ctx, analysisManager) if (!conf.staticAnalysis.exists(!_.irreducibleLoops) && conf.generateLoopInvariants) { if (!conf.staticAnalysis.exists(_.irreducibleLoops)) { @@ -194,9 +189,9 @@ object RunUtils { } } - prepareForTranslation(ctx, analysisManager) + getPrepareForTranslationTransform(q, Set("free"))(ctx, analysisManager) - if conf.generateRelyGuarantees then genRgConditions(ctx, analysisManager) + if conf.generateRelyGuarantees then getGenerateRgConditionsTransform(ctx.program.procedures.toList.filter(_.returnBlock != None))(ctx, analysisManager) q.loading.dumpIL.foreach(s => { val timer = PerformanceTimer("Dump IL") From 8ae26c741f507c48893bdf1d01ce5fda828a60f4 Mon Sep 17 00:00:00 2001 From: James Tobler <64625414+j-tobler@users.noreply.github.com> Date: Fri, 12 Sep 2025 14:55:34 +1000 Subject: [PATCH 28/30] ./mill fmt --- src/main/scala/ir/transforms/Simp.scala | 1 - src/main/scala/util/RunUtils.scala | 12 +++++++++--- 2 files changed, 9 insertions(+), 4 deletions(-) diff --git a/src/main/scala/ir/transforms/Simp.scala b/src/main/scala/ir/transforms/Simp.scala index 87bfd700e2..df0cfbee2e 100644 --- a/src/main/scala/ir/transforms/Simp.scala +++ b/src/main/scala/ir/transforms/Simp.scala @@ -7,7 +7,6 @@ import translating.PrettyPrinter.* import util.assertion.* import util.{SimplifyLogger, condPropDebugLogger} -import java.io.BufferedWriter import scala.collection.mutable import scala.util.boundary diff --git a/src/main/scala/util/RunUtils.scala 
b/src/main/scala/util/RunUtils.scala index 52b3cfe760..16645f19d8 100644 --- a/src/main/scala/util/RunUtils.scala +++ b/src/main/scala/util/RunUtils.scala @@ -72,7 +72,8 @@ object RunUtils { assert(ir.invariant.programDiamondForm(ctx.program)) - if q.loading.trimEarly then getStripUnreachableFunctionsTransform(q.loading.procedureTrimDepth)(ctx, analysisManager) + if q.loading.trimEarly then + getStripUnreachableFunctionsTransform(q.loading.procedureTrimDepth)(ctx, analysisManager) // todo: since refactoring, there is some extra code that is run here // see StripUnreachableFunctions.getStripUnreachableFunctionsTransform @@ -148,7 +149,8 @@ object RunUtils { memTransferTimer.checkPoint("Performed Memory Transform") } - if q.summariseProcedures then getGenerateProcedureSummariesTransform(q.loading.parameterForm || q.simplify)(ctx, analysisManager) + if q.summariseProcedures then + getGenerateProcedureSummariesTransform(q.loading.parameterForm || q.simplify)(ctx, analysisManager) if (!conf.staticAnalysis.exists(!_.irreducibleLoops) && conf.generateLoopInvariants) { if (!conf.staticAnalysis.exists(_.irreducibleLoops)) { @@ -191,7 +193,11 @@ object RunUtils { getPrepareForTranslationTransform(q, Set("free"))(ctx, analysisManager) - if conf.generateRelyGuarantees then getGenerateRgConditionsTransform(ctx.program.procedures.toList.filter(_.returnBlock != None))(ctx, analysisManager) + if conf.generateRelyGuarantees then + getGenerateRgConditionsTransform(ctx.program.procedures.toList.filter(_.returnBlock != None))( + ctx, + analysisManager + ) q.loading.dumpIL.foreach(s => { val timer = PerformanceTimer("Dump IL") From 90be903fb8e284aab4cf442a57b1fde8733d5670 Mon Sep 17 00:00:00 2001 From: James Tobler <64625414+j-tobler@users.noreply.github.com> Date: Fri, 12 Sep 2025 15:20:32 +1000 Subject: [PATCH 29/30] fix scaladoc build errors (hopefully) --- src/main/scala/ir/parsing/package.scala | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/src/main/scala/ir/parsing/package.scala b/src/main/scala/ir/parsing/package.scala index 5bcba23103..d2ad8aa6e2 100644 --- a/src/main/scala/ir/parsing/package.scala +++ b/src/main/scala/ir/parsing/package.scala @@ -70,7 +70,7 @@ package ir * The visitor returns an unresolved Basil DSL structure, [[ir.dsl.EventuallyProgram]]. * 6. In [[ir.parsing.ParseBasilIL.makeBasilIRContext]], the DSL structure is resolved into a * real Basil IR [[ir.Program]] and combined with the [[ir.parsing.Declarations]] - * to produce a [[util.IRContext]]. + * to produce a [[ir.IRContext]]. * */ package object parsing {} From 61680f905cde4b8d79f56834e8d0c83125504a14 Mon Sep 17 00:00:00 2001 From: James Tobler <64625414+j-tobler@users.noreply.github.com> Date: Fri, 12 Sep 2025 15:28:46 +1000 Subject: [PATCH 30/30] fix scaladoc build errors; attempt 2 --- src/main/scala/ir/parsing/ParseBasilIL.scala | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/src/main/scala/ir/parsing/ParseBasilIL.scala b/src/main/scala/ir/parsing/ParseBasilIL.scala index eaa86c4dce..7b3f19c869 100644 --- a/src/main/scala/ir/parsing/ParseBasilIL.scala +++ b/src/main/scala/ir/parsing/ParseBasilIL.scala @@ -7,7 +7,7 @@ import java.io.{FileReader, Reader, StringReader} object ParseBasilIL { /** - * Combines the parsed declarations and the parsed DSL program into a [[util.IRContext]], + * Combines the parsed declarations and the parsed DSL program into a [[ir.IRContext]], * including resolving the DSL program into a Basil IR program. 
*/ def makeBasilIRContext(decls: Declarations, prog: ir.dsl.EventuallyProgram) = {