diff --git a/src/main/scala/Main.scala b/src/main/scala/Main.scala index d425e8eb6..ad1030635 100644 --- a/src/main/scala/Main.scala +++ b/src/main/scala/Main.scala @@ -1,6 +1,7 @@ // package scala import gtirb.GTIRBReadELF +import ir.{FrontendMode, IRLoading} import mainargs.{Flag, ParserForClass, arg, main} import util.boogie_interaction.BoogieResultKind import util.{ @@ -11,9 +12,7 @@ import util.{ DSAPhase, DSConfig, DebugDumpIRLogger, - FrontendMode, ILLoadingConfig, - IRLoading, LogLevel, Logger, MemoryRegionsMode, diff --git a/src/main/scala/analysis/AnalysePipelineMRA.scala b/src/main/scala/analysis/AnalysePipelineMRA.scala new file mode 100644 index 000000000..41492a56d --- /dev/null +++ b/src/main/scala/analysis/AnalysePipelineMRA.scala @@ -0,0 +1,424 @@ +package analysis + +import analysis.data_structure_analysis.{DataStructureAnalysis, SymbolicAddress, SymbolicAddressAnalysis} +import analysis.{Interval as _, *} +import boogie.* +import ir.* +import specification.* +import translating.PrettyPrinter.pp_prog +import util.{ + AnalysisResultDotLogger, + DebugDumpIRLogger, + Logger, + MemoryRegionsMode, + StaticAnalysisConfig, + StaticAnalysisLogger, + writeToFile +} + +import java.io.File +import scala.collection.mutable + +/** Methods relating to program static analysis. + */ +object AnalysisPipelineMRA { + + def reducibleLoops(IRProgram: Program) = { + StaticAnalysisLogger.debug("reducible loops") + val foundLoops = LoopDetector.identify_loops(IRProgram) + foundLoops.irreducibleLoops.foreach(l => StaticAnalysisLogger.debug(s"Irreducible loop found: ${l.name}")) + + val newLoops = foundLoops.reducibleTransformIR().identifiedLoops + newLoops.foreach(l => StaticAnalysisLogger.debug(s"Loop found: ${l.name}")) + + foundLoops.updateIrWithLoops() + } + + /** Run all static analysis passes on the provided IRProgram. 
+ */ + def analyse( + ictx: IRContext, + config: StaticAnalysisConfig, + iteration: Int, + previousResults: Option[StaticAnalysisContext] = None + ): StaticAnalysisContext = { + var ctx = ictx + val IRProgram: Program = ctx.program + val externalFunctions: Set[ExternalFunction] = ctx.externalFunctions + val globals: Set[SpecGlobal] = ctx.globals + val globalOffsets: Map[BigInt, BigInt] = ctx.globalOffsets + + assert(invariant.singleCallBlockEnd(ctx.program)) + assert(invariant.cfgCorrect(ctx.program)) + assert(invariant.blocksUniqueToEachProcedure(ctx.program)) + assert(invariant.correctCalls(ctx.program)) + + val subroutines = IRProgram.procedures + .filter(p => p.address.isDefined) + .map(p => p.address.get -> p.name) + .toMap + val globalAddresses = globals.map(s => s.address -> s.name).toMap + val globalSizes = globals.map(s => s.name -> s.size).toMap + val externalAddresses = externalFunctions.map(e => e.offset -> e.name).toMap + StaticAnalysisLogger.debug("Globals:") + StaticAnalysisLogger.debug(globalAddresses) + StaticAnalysisLogger.debug("Global Offsets: ") + StaticAnalysisLogger.debug(globalOffsets) + StaticAnalysisLogger.debug("Global Sizes: ") + StaticAnalysisLogger.debug(globalSizes) + StaticAnalysisLogger.debug("External: ") + StaticAnalysisLogger.debug(externalAddresses) + StaticAnalysisLogger.debug("Subroutine Addresses:") + StaticAnalysisLogger.debug(subroutines) + + // reducible loops + if (config.irreducibleLoops) { + reducibleLoops(IRProgram) + + config.analysisDotPath.foreach { s => + AnalysisResultDotLogger.writeToFile( + File(s"${s}_graph-after-loop-reduce-$iteration.dot"), + dotBlockGraph(IRProgram, IRProgram.map(b => b -> b.toString).toMap) + ) + AnalysisResultDotLogger.writeToFile( + File(s"${s}_blockgraph-after-loop-reduce-$iteration.dot"), + dotBlockGraph(IRProgram, IRProgram.filter(_.isInstanceOf[Block]).map(b => b -> b.toString).toMap) + ) + } + } + + val mergedSubroutines = subroutines ++ externalAddresses + + val domain = 
computeDomain(IntraProcIRCursor, IRProgram.procedures) + val interDomain = computeDomain(InterProcIRCursor, IRProgram.procedures) + + StaticAnalysisLogger.debug("[!] Running ANR") + val ANRSolver = ANRAnalysisSolver(IRProgram) + val ANRResult = ANRSolver.analyze() + + StaticAnalysisLogger.debug("[!] Running RNA") + val RNASolver = RNAAnalysisSolver(IRProgram) + val RNAResult = RNASolver.analyze() + + StaticAnalysisLogger.debug("[!] Running Inter-procedural Constant Propagation") + val interProcConstProp = InterProcConstantPropagation(IRProgram) + val interProcConstPropResult: Map[CFGPosition, Map[Variable, FlatElement[BitVecLiteral]]] = + interProcConstProp.analyze() + + config.analysisResultsPath.foreach { s => + DebugDumpIRLogger.writeToFile( + File(s"${s}OGconstprop$iteration.txt"), + printAnalysisResults(IRProgram, interProcConstPropResult) + ) + } + + val intraProcConstProp = IntraProcConstantPropagation(IRProgram) + val intraProcConstPropResult: Map[CFGPosition, Map[Variable, FlatElement[BitVecLiteral]]] = + intraProcConstProp.analyze() + + config.analysisResultsPath.foreach { s => + DebugDumpIRLogger.writeToFile( + File(s"${s}_new_ir_constprop$iteration.txt"), + printAnalysisResults(IRProgram, intraProcConstPropResult) + ) + } + + config.analysisDotPath.foreach { f => + val dumpdomain = computeDomain[CFGPosition, CFGPosition](InterProcIRCursor, IRProgram.procedures) + AnalysisResultDotLogger.writeToFile( + File(s"${f}_new_ir_intercfg$iteration.dot"), + toDot(dumpdomain.toSet, InterProcIRCursor, Map.empty, Set()) + ) + } + + val reachingDefinitionsAnalysisSolver = InterprocReachingDefinitionsAnalysisSolver(IRProgram) + val reachingDefinitionsAnalysisResults = reachingDefinitionsAnalysisSolver.analyze() + + config.analysisDotPath.foreach { s => + AnalysisResultDotLogger.writeToFile( + File(s"${s}_reachingDefinitions$iteration.dot"), + toDot( + IRProgram, + IRProgram.filter(_.isInstanceOf[Command]).map(b => b -> 
reachingDefinitionsAnalysisResults(b).toString).toMap, + true + ) + ) + } + + Logger.debug("[!] Running Writes To") + val writesTo = WriteToAnalysis(ctx.program).analyze() + + Logger.debug("[!] Running commondef variable renaming (Intra SSA)") + val SSAResults = getCommonDefinitionVariableRenaming(IRProgram, writesTo) + + config.analysisDotPath.foreach(s => { + writeToFile( + toDot(IRProgram, IRProgram.filter(_.isInstanceOf[Command]).map(b => b -> SSAResults(b).toString).toMap, true), + s"${s}_SSA$iteration.dot" + ) + }) + + val mmm = MemoryModelMap(globalOffsets, mergedSubroutines, globalAddresses, globalSizes) + mmm.preLoadGlobals() + + val previousVSAResults = if (previousResults.isDefined) { + previousResults.get.vsaResult + } else { + Map[CFGPosition, LiftedElement[Map[Variable | MemoryRegion, Set[Value]]]]() + } + + StaticAnalysisLogger.debug("[!] Running GRA") + val graSolver = GlobalRegionAnalysisSolver( + IRProgram, + domain.toSet, + interProcConstPropResult, + reachingDefinitionsAnalysisResults, + mmm, + previousVSAResults + ) + val graResult = graSolver.analyze() + + StaticAnalysisLogger.debug("[!] 
Running MRA") + val mraSolver = MemoryRegionAnalysisSolver( + IRProgram, + domain.toSet, + interProcConstPropResult, + reachingDefinitionsAnalysisResults, + graResult, + mmm, + previousVSAResults + ) + val mraResult = mraSolver.analyze() + + config.analysisDotPath.foreach { s => + AnalysisResultDotLogger.writeToFile(File(s"${s}_callgraph$iteration.dot"), dotCallGraph(IRProgram)) + AnalysisResultDotLogger.writeToFile( + File(s"${s}_blockgraph$iteration.dot"), + dotBlockGraph(IRProgram, IRProgram.filter(_.isInstanceOf[Block]).map(b => b -> b.toString).toMap) + ) + + AnalysisResultDotLogger.writeToFile( + File(s"${s}_new_ir_constprop$iteration.dot"), + toDot( + IRProgram, + IRProgram.filter(_.isInstanceOf[Command]).map(b => b -> intraProcConstPropResult(b).toString).toMap + ) + ) + + writeToFile( + toDot(IRProgram, IRProgram.filter(_.isInstanceOf[Command]).map(b => b -> ANRResult(b).toString).toMap), + s"${s}_ANR$iteration.dot" + ) + + writeToFile( + toDot(IRProgram, IRProgram.filter(_.isInstanceOf[Command]).map(b => b -> RNAResult(b).toString).toMap), + s"${s}_RNA$iteration.dot" + ) + + writeToFile( + toDot(IRProgram, IRProgram.filter(_.isInstanceOf[Command]).map(b => b -> mraResult(b).toString).toMap), + s"${s}_MRA$iteration.dot" + ) + + AnalysisResultDotLogger.writeToFile( + File(s"${s}_GRA$iteration.dot"), + toDot(IRProgram, IRProgram.filter(_.isInstanceOf[Command]).map(b => b -> graResult(b).toString).toMap) + ) + } + + StaticAnalysisLogger.debug("[!] Running MMM") + mmm.convertMemoryRegions( + mraSolver.procedureToStackRegions, + mraSolver.procedureToHeapRegions, + mraResult, + mraSolver.procedureToSharedRegions, + graSolver.getDataMap, + graResult + ) + mmm.logRegions() + + Logger.debug("[!] 
Running VSA") + val vsaSolver = ValueSetAnalysisSolver(IRProgram, mmm) + val vsaResult: Map[CFGPosition, LiftedElement[Map[Variable | MemoryRegion, Set[Value]]]] = vsaSolver.analyze() + + mmm.postLoadVSARelations(vsaResult, ANRResult, RNAResult) + + config.analysisDotPath.foreach { s => + AnalysisResultDotLogger.writeToFile( + File(s"${s}_VSA$iteration.dot"), + toDot(IRProgram, IRProgram.filter(_.isInstanceOf[Command]).map(b => b -> vsaResult(b).toString).toMap) + ) + } + + Logger.debug("[!] Running Steensgaard") + val steensgaardSolver = InterprocSteensgaardAnalysis(interDomain.toSet, mmm, SSAResults) + steensgaardSolver.analyze() + val steensgaardResults = steensgaardSolver.pointsTo() + + mmm.setCallSiteSummaries(steensgaardSolver.callSiteSummary) + + val paramResults: Map[Procedure, Set[Variable]] = ParamAnalysis(IRProgram).analyze() + val interLiveVarsResults: Map[CFGPosition, Map[Variable, TwoElement]] = InterLiveVarsAnalysis(IRProgram).analyze() + + StaticAnalysisContext( + intraProcConstProp = intraProcConstPropResult, + interProcConstProp = interProcConstPropResult, + memoryRegionResult = mraResult, + vsaResult = vsaResult, + interLiveVarsResults = interLiveVarsResults, + paramResults = paramResults, + steensgaardResults = steensgaardResults, + mmmResults = mmm, + symbolicAddresses = Map.empty, + reachingDefs = reachingDefinitionsAnalysisResults, + regionInjector = None, + localDSA = Map.empty, + bottomUpDSA = Map.empty, + topDownDSA = Map.empty, + writesToResult = writesTo, + ssaResults = SSAResults + ) + } + + def printAnalysisResults(prog: Program, result: Map[CFGPosition, _]): String = { + val results = mutable.ArrayBuffer[String]() + val toVisit = mutable.Stack[CFGPosition]() + val visited = mutable.HashSet[CFGPosition]() + toVisit.pushAll(prog.procedures) + + while (toVisit.nonEmpty) { + val next = toVisit.pop() + visited.add(next) + toVisit.pushAll( + IntraProcBlockIRCursor + .succ(next) + .diff(visited.collect[Block] { case b: Block => + b + }) + ) 
+ + def contentStr(b: CFGPosition) = { + if result.contains(b) then "\n :: " + result(b) + else "" + } + + val t = next match + case p: Procedure => s"\nProcedure ${p.name}" + case b: Block => + Seq( + s" Block ${b.label}${contentStr(b)}", + b.statements + .map(s => { + " " + s.toString + contentStr(s) + }) + .mkString("\n"), + " " + b.jump.toString + contentStr(b.jump) + ).mkString("\n") + case s: Statement => s" Statement $s${contentStr(s)}" + case s: Jump => s" Jump $s${contentStr(s)}" + results.addOne(t) + } + results.mkString(System.lineSeparator()) + } + + /** Use static analysis to resolve indirect calls and replace them in the IR until fixed point. + */ + def runToFixpoint(config: StaticAnalysisConfig, ctx: IRContext): StaticAnalysisContext = { + var iteration = 1 + var modified: Boolean = true + val analysisResult = mutable.ArrayBuffer[StaticAnalysisContext]() + while (modified) { + Logger.debug("[!] Running Static Analysis") + val result = analysis.AnalysisPipelineMRA.analyse(ctx, config, iteration, analysisResult.lastOption) + val previousResult = analysisResult.lastOption + analysisResult.append(result) + StaticAnalysisLogger.info("[!] Replacing Indirect Calls") + + /* + modified = transforms.SteensgaardIndirectCallResolution( + ctx.program, + result.steensgaardResults, + result.reachingDefs + ).resolveIndirectCalls() + */ + + if ( + config.memoryRegions == MemoryRegionsMode.MRA && (previousResult.isEmpty || result.vsaResult != previousResult.get.vsaResult) + ) { + modified = true + } else { + modified = + transforms.VSAIndirectCallResolution(ctx.program, result.vsaResult, result.mmmResults).resolveIndirectCalls() + } + + if (modified) { + iteration += 1 + StaticAnalysisLogger.info(s"[!] 
Analysing again (iter $iteration)") + } + } + + // should later move this to be inside while (modified) loop and have splitting threads cause further iterations + + if (config.threadSplit) { + transforms.splitThreads(ctx.program, analysisResult.last.steensgaardResults, analysisResult.last.ssaResults) + } + + val reachingDefs = ReachingDefsAnalysis(ctx.program, analysisResult.last.writesToResult).analyze() + config.analysisDotPath.foreach { s => + AnalysisResultDotLogger.writeToFile(File(s"${s}_ct.dot"), toDot(ctx.program)) + } + + StaticAnalysisLogger.info("[!] Running Symbolic Access Analysis") + val symResults: Map[CFGPosition, Map[SymbolicAddress, TwoElement]] = + SymbolicAddressAnalysis(ctx.program, analysisResult.last.interProcConstProp).analyze() + config.analysisDotPath.foreach { s => + val labels = symResults.map { (k, v) => k -> v.toString } + AnalysisResultDotLogger.writeToFile(File(s"${s}_saa.dot"), toDot(ctx.program, labels)) + } + + StaticAnalysisLogger.info("[!] Running DSA Analysis") + + writeToFile(pp_prog(ctx.program), "testo1.il") + val symbolTableEntries: Set[SymbolTableEntry] = ctx.globals ++ ctx.funcEntries + val dsa = DataStructureAnalysis( + ctx.program, + symResults, + analysisResult.last.interProcConstProp, + symbolTableEntries, + ctx.globalOffsets, + ctx.externalFunctions, + reachingDefs, + analysisResult.last.writesToResult, + analysisResult.last.paramResults + ) + dsa.analyze() + + config.analysisDotPath.foreach { s => + dsa.topDown(ctx.program.mainProcedure).toDot + DebugDumpIRLogger.writeToFile(File(s"${s}_main_dsg.dot"), dsa.topDown(ctx.program.mainProcedure).toDot) + } + + Logger.debug("[!] 
Injecting regions") + val regionInjector = if (config.memoryRegions == MemoryRegionsMode.MRA) { + val injector = RegionInjectorMRA(ctx.program, analysisResult.last.mmmResults) + injector.injectRegions() + Some(injector) + } else if (config.memoryRegions == MemoryRegionsMode.DSA) { + val injector = RegionInjectorDSA(ctx.program, dsa.topDown) + injector.injectRegions() + Some(injector) + } else { + None + } + + assert(invariant.singleCallBlockEnd(ctx.program)) + StaticAnalysisLogger.info(s"[!] Finished indirect call resolution after $iteration iterations") + analysisResult.last.copy( + symbolicAddresses = symResults, + localDSA = dsa.local.toMap, + bottomUpDSA = dsa.bottomUp.toMap, + topDownDSA = dsa.topDown.toMap, + regionInjector = regionInjector + ) + } +} diff --git a/src/main/scala/analysis/AnalysisManager.scala b/src/main/scala/analysis/AnalysisManager.scala new file mode 100644 index 000000000..21993eb80 --- /dev/null +++ b/src/main/scala/analysis/AnalysisManager.scala @@ -0,0 +1,60 @@ +package analysis + +import ir.Program + +import scala.collection.mutable + +/** Analysis manager for caching and invalidating analysis results. + * + * @param program Each analysis manager is defined with respect to a particular program reference, which is always + * passed to the static analyses invoked via this manager. This ensures that the cached analysis results all relate to + * the same program reference. It is then the responsibility of Transforms to clear these results when they are + * invalidated by a modification to this program. 
+ */ +class AnalysisManager(val program: Program) { + + // memoizer to wrap static analyses and cache their results + class Memoizer[ReturnType](analysis: StaticAnalysis[ReturnType]) { + + private var memo: Option[ReturnType] = None + + def invalidate() = { memo = None } + + // allows this memoizer to be called like a function + def apply(): ReturnType = { + // pass this analysis manager and its associated program to the static analysis + if memo.isEmpty then memo = Some(analysis(AnalysisManager.this)) + memo.get + } + } + + enum Invalidation { + case PreserveAll + case ClobberAll + case PreserveSome(toPreserve: Set[Memoizer[?]]) + case ClobberSome(toClobber: Set[Memoizer[?]]) + } + + // todo: not sure if this is the right approach - maybe we should implement convenience methods instead? + export Invalidation.* + + // keep track of all memoizers to ensure we can invalidate all of them + private val memoizers: mutable.Set[Memoizer[?]] = mutable.Set.empty + + // private helper function for creating and storing memoizers + private def register[ReturnType](analysis: StaticAnalysis[ReturnType]): Memoizer[ReturnType] = { + val mem = Memoizer[ReturnType](analysis) + memoizers += mem + return mem + } + + def invalidate(invalidation: Invalidation): Unit = invalidation match { + case PreserveAll => () + case ClobberAll => memoizers.foreach(_.invalidate()) + case PreserveSome(toPreserve) => (memoizers.toSet -- toPreserve).foreach(_.invalidate()) + case ClobberSome(toClobber) => toClobber.foreach(_.invalidate()) + } + + // todo: list of memoizers which can be directly called via this manager + // val exampleAnalysis = register(ExampleAnalysis()) +} diff --git a/src/main/scala/analysis/Legacy.scala b/src/main/scala/analysis/Legacy.scala new file mode 100644 index 000000000..516248842 --- /dev/null +++ b/src/main/scala/analysis/Legacy.scala @@ -0,0 +1,26 @@ +package analysis + +import analysis.Interval as _ +import analysis.data_structure_analysis.* +import ir.* + +import 
scala.jdk.CollectionConverters.* + +case class StaticAnalysisContext( + intraProcConstProp: Map[CFGPosition, Map[Variable, FlatElement[BitVecLiteral]]], + interProcConstProp: Map[CFGPosition, Map[Variable, FlatElement[BitVecLiteral]]], + memoryRegionResult: Map[CFGPosition, ((Set[StackRegion], Set[Variable]), Set[HeapRegion])], + vsaResult: Map[CFGPosition, LiftedElement[Map[Variable | MemoryRegion, Set[Value]]]], + interLiveVarsResults: Map[CFGPosition, Map[Variable, TwoElement]], + paramResults: Map[Procedure, Set[Variable]], + steensgaardResults: Map[RegisterWrapperEqualSets, Set[RegisterWrapperEqualSets | MemoryRegion]], + mmmResults: MemoryModelMap, + reachingDefs: Map[CFGPosition, (Map[Variable, Set[Assign]], Map[Variable, Set[Assign]])], + regionInjector: Option[RegionInjector], + symbolicAddresses: Map[CFGPosition, Map[SymbolicAddress, TwoElement]], + localDSA: Map[Procedure, Graph], + bottomUpDSA: Map[Procedure, Graph], + topDownDSA: Map[Procedure, Graph], + writesToResult: Map[Procedure, Set[GlobalVar]], + ssaResults: Map[CFGPosition, (Map[Variable, FlatElement[Int]], Map[Variable, FlatElement[Int]])] +) diff --git a/src/main/scala/analysis/StaticAnalysis.scala b/src/main/scala/analysis/StaticAnalysis.scala new file mode 100644 index 000000000..ac90c24d4 --- /dev/null +++ b/src/main/scala/analysis/StaticAnalysis.scala @@ -0,0 +1,23 @@ +package analysis + +import util.PerformanceTimer + +/** Provides a consistent interface for static analyses. + * Similar to Transform, but returns a result rather than modifying the IR in-place. + * + * @tparam ReturnType The type of the result that this analysis generates. + * @param name The name of this analysis. 
+ */ +trait StaticAnalysis[ReturnType](val name: String) { + + val t = PerformanceTimer(name) + + protected def implementation(man: AnalysisManager): ReturnType + + def apply(man: AnalysisManager): ReturnType = { + t.checkPoint("start") + val ret = implementation(man) + t.checkPoint("end") + ret + } +} diff --git a/src/main/scala/analysis/data_structure_analysis/DataStructureAnalysis.scala b/src/main/scala/analysis/data_structure_analysis/DataStructureAnalysis.scala index c9981a1f1..8fb1eb369 100644 --- a/src/main/scala/analysis/data_structure_analysis/DataStructureAnalysis.scala +++ b/src/main/scala/analysis/data_structure_analysis/DataStructureAnalysis.scala @@ -8,6 +8,15 @@ import util.assertion.* import scala.collection.mutable +case class DSAContext( + sva: Map[Procedure, SymValues[OSet]], + constraints: Map[Procedure, Set[Constraint]], + local: Map[Procedure, IntervalGraph], + bottomUp: Map[Procedure, IntervalGraph], + topDown: Map[Procedure, IntervalGraph], + globals: Map[IntervalNode, IntervalNode] +) + /** Data Structure Analysis Performs all phases of DSA and stores the results in member variables local, bottom-up, * top-down results in member variables locals, bu and td respectively. 
* @param program diff --git a/src/main/scala/analysis/data_structure_analysis/IntervalDSA.scala b/src/main/scala/analysis/data_structure_analysis/IntervalDSA.scala index e37a8bed8..e47d30377 100644 --- a/src/main/scala/analysis/data_structure_analysis/IntervalDSA.scala +++ b/src/main/scala/analysis/data_structure_analysis/IntervalDSA.scala @@ -11,7 +11,7 @@ import specification.{ExternalFunction, FuncEntry, SymbolTableEntry} import util.DSAPhase.* import util.LogLevel.INFO import util.assertion.* -import util.{DSAContext, DSALogger, DSAPhase, DSConfig, IRContext, PerformanceTimer} +import util.{DSALogger, DSAPhase, DSConfig, PerformanceTimer} import scala.collection.mutable import scala.collection.mutable.ArrayBuffer diff --git a/src/main/scala/analysis/data_structure_analysis/SymbolicValueAnalysis.scala b/src/main/scala/analysis/data_structure_analysis/SymbolicValueAnalysis.scala index c5a1497df..cd952ae8c 100644 --- a/src/main/scala/analysis/data_structure_analysis/SymbolicValueAnalysis.scala +++ b/src/main/scala/analysis/data_structure_analysis/SymbolicValueAnalysis.scala @@ -5,8 +5,8 @@ import ir.* import ir.cilvisitor.{CILVisitor, DoChildren, SkipChildren, visit_expr} import ir.eval.BitVectorEval.bv2SignedInt import ir.transforms.{AbstractDomain, worklistSolver} +import util.SVALogger as Logger import util.assertion.* -import util.{IRContext, SVALogger as Logger} import scala.annotation.tailrec import scala.collection.{SortedMap, mutable} diff --git a/src/main/scala/analysis/procedure_summaries/SummaryGenerator.scala b/src/main/scala/analysis/procedure_summaries/SummaryGenerator.scala index 5420b3243..8bfc5480f 100644 --- a/src/main/scala/analysis/procedure_summaries/SummaryGenerator.scala +++ b/src/main/scala/analysis/procedure_summaries/SummaryGenerator.scala @@ -2,7 +2,7 @@ package analysis import boogie.{BExpr, TrueBLiteral} import ir.* -import ir.transforms.{SCCCallgraphWorklistSolver, reversePostOrder, worklistSolver} +import 
ir.transforms.{SCCCallgraphWorklistSolver, Transform, reversePostOrder, worklistSolver} import util.ProcedureSummariesLogger as Logger case class Condition(pred: Predicate, label: Option[String] = None) @@ -209,3 +209,22 @@ class SummaryGenerator(program: Program, parameterForm: Boolean = false) { .filter(_ != TrueBLiteral) } } + +def getGenerateProcedureSummariesTransform(simplified: Boolean): Transform = + Transform( + "GenerateProcedureSummaries", + (ctx, man) => { + val prog = ctx.program + // Need to know modifies clauses to generate summaries, but this is probably out of place (fixme) + val specModifies = ctx.specification.subroutines.map(s => s.name -> s.modifies).toMap + prog.setModifies(specModifies) + + val summaryGenerator = SummaryGenerator(prog, simplified) + for procedure <- prog.procedures if procedure != prog.mainProcedure do + procedure.requires = summaryGenerator.generateRequires(procedure) + procedure.ensures = summaryGenerator.generateEnsures(procedure) + + man.ClobberAll + }, + notice = "Generating Procedure Summaries" + ) diff --git a/src/main/scala/analysis/rely_guarantee_generation/RelyGuaranteeGeneration.scala b/src/main/scala/analysis/rely_guarantee_generation/RelyGuaranteeGeneration.scala index 5a762e105..ad21af665 100644 --- a/src/main/scala/analysis/rely_guarantee_generation/RelyGuaranteeGeneration.scala +++ b/src/main/scala/analysis/rely_guarantee_generation/RelyGuaranteeGeneration.scala @@ -2,6 +2,7 @@ package analysis import ir.* import ir.transforms.* +import util.StaticAnalysisLogger import scala.collection.mutable.Queue @@ -149,3 +150,27 @@ class GuarGenSummaryGenerator[T, S](dom: InterferenceProductDomain[T, S]) // we want to expand the previous postcondition by joining this one dom.pureJoin(prevSummary, resAfter(p.returnBlock.get)) } + +def getGenerateRgConditionsTransform(threads: List[Procedure]): Transform = + Transform( + "GenerateRgConditions", + (ctx, man) => { + type StateLatticeElement = LatticeMap[Variable, 
analysis.Interval] + type InterferenceLatticeElement = Map[Variable, StateLatticeElement] + val stateLattice = IntervalLatticeExtension() + val stateTransfer = SignedIntervalDomain().transfer + val intDom = ConditionalWritesDomain[StateLatticeElement](stateLattice, stateTransfer) + val relyGuarantees = + RelyGuaranteeGenerator[InterferenceLatticeElement, StateLatticeElement](intDom).generate(threads) + // fixme: these should not be printed to stdout + for ((p, (rely, guar)) <- relyGuarantees) { + StaticAnalysisLogger.info("--- " + p.procName + " " + "-" * 50 + "\n") + StaticAnalysisLogger.info("Rely:") + StaticAnalysisLogger.info(intDom.toString(rely) + "\n") + StaticAnalysisLogger.info("Guarantee:") + StaticAnalysisLogger.info(intDom.toString(guar) + "\n") + } + man.ClobberAll + }, + notice = "Generating Rely-Guarantee Conditions" + ) diff --git a/src/main/scala/ir/IRLoading.scala b/src/main/scala/ir/IRLoading.scala new file mode 100644 index 000000000..d8c216b37 --- /dev/null +++ b/src/main/scala/ir/IRLoading.scala @@ -0,0 +1,223 @@ +package ir + +import Parsers.* +import analysis.Interval as _ +import bap.* +import boogie.* +import com.grammatech.gtirb.proto.IR.IR +import gtirb.* +import ir.* +import org.antlr.v4.runtime.{BailErrorStrategy, CharStreams, CommonTokenStream} +import specification.* +import translating.* +import util.{ILLoadingConfig, Logger} + +import java.io.FileInputStream +import scala.jdk.CollectionConverters.* + +enum FrontendMode { + case Bap + case Gtirb + case Basil +} + +/** Stores the IR Program loaded from the binary and ELF tables, which is modified during analysis and program + * transformation. + */ +case class IRContext( + symbols: List[ELFSymbol], + externalFunctions: Set[ExternalFunction], + globals: Set[SpecGlobal], + funcEntries: Set[FuncEntry], + globalOffsets: Map[BigInt, BigInt], + specification: Specification, + program: Program // internally mutable +) + +object IRLoading { + + /** Create a context from just an IR program. 
+ */ + def load(p: Program): IRContext = { + IRContext( + List.empty, + Set.empty, + Set.empty, + Set.empty, + Map.empty, + IRLoading.loadSpecification(None, p, Set.empty), + p + ) + } + + /** Load a program from files using the provided configuration. + */ + def load(q: ILLoadingConfig): IRContext = { + + val mode = q.frontendMode + if (q.inputFile.endsWith(".gtirb") && !q.gtirbLiftOffline) { + throw IllegalArgumentException(".gtirb input requires --lifter") + } + + val (mainAddress, makeContext) = q.relfFile match { + case Some(relf) => { + + // allow loading elf from inputFile if using GTIRB mode. + val relfData = if (relf == q.inputFile && mode == FrontendMode.Gtirb) { + Logger.info("[!] Using ELF data from GTIRB: " + q.inputFile) + IRLoading.loadGTIRBReadELF(q) + } else { + Logger.info("[!] Using ELF data from relf: " + relf) + IRLoading.loadReadELF(relf, q) + } + + val ReadELFData(symbols, externalFunctions, globals, funcEntries, globalOffsets, mainAddress) = relfData + + def continuation(ctx: IRContext) = + val specification = IRLoading.loadSpecification(q.specFile, ctx.program, globals) + IRContext(symbols, externalFunctions, globals, funcEntries, globalOffsets, specification, ctx.program) + + (Some(mainAddress), continuation) + } + case None if mode == FrontendMode.Gtirb => { + Logger.warn( + "RELF input not provided, this is not recommended! To provide a RELF input, specify --relf or --gts-relf." 
+ ) + (None, (x: IRContext) => x) + } + case None => { + (None, (x: IRContext) => x) + } + } + + val program: IRContext = (mode, mainAddress) match { + case (FrontendMode.Gtirb, _) => + IRLoading.load(loadGTIRB(q.inputFile, mainAddress, q.gtirbLiftOffline, Some(q.mainProcedureName))) + case (FrontendMode.Basil, _) => ir.parsing.ParseBasilIL.loadILFile(q.inputFile) + case (FrontendMode.Bap, None) => throw Exception("relf is required when using BAP input") + case (FrontendMode.Bap, Some(mainAddress)) => { + val bapProgram = loadBAP(q.inputFile) + IRLoading.load(BAPToIR(bapProgram, mainAddress).translate) + } + } + + val ctx = makeContext(program) + mode match { + case FrontendMode.Basil => { + ctx.program.procedures.foreach(_.updateBlockSuffix()) + Logger.info("[!] Disabling PC tracking transforms due to IL input") + } + case _ => { + ir.transforms.PCTracking.applyPCTracking(q.pcTracking, ctx.program) + ctx.program.procedures.foreach(_.normaliseBlockNames()) + ir.transforms.clearParams(ctx.program) + } + } + ctx + } + + def loadBAP(fileName: String): BAPProgram = { + val ADTLexer = BAP_ADTLexer(CharStreams.fromFileName(fileName)) + val tokens = CommonTokenStream(ADTLexer) + val parser = BAP_ADTParser(tokens) + + parser.setBuildParseTree(true) + + BAPLoader().visitProject(parser.project()) + } + + def skipGTIRBMagic(fileName: String): FileInputStream = { + val fIn = FileInputStream(fileName) + (0 to 7).map(_ => fIn.read()).toList match { + case List('G', 'T', 'I', 'R', 'B', _, _, _) => fIn + case o if fileName.endsWith(".gtirb") => { + fIn.close() + throw Exception(s"Did not find gtirb magic at start of file: $o") + } + case _ => { + fIn.close() + FileInputStream(fileName) + } + } + } + + def loadGTIRB( + fileName: String, + mainAddress: Option[BigInt], + gtirbLiftOffline: Boolean, + mainName: Option[String] = None + ): Program = { + val fIn = skipGTIRBMagic(fileName) + val ir = IR.parseFrom(fIn) + val mods = ir.modules + val cfg = ir.cfg.get + + val lifter = + if 
(gtirbLiftOffline) then OfflineLifterInsnLoader(mods) + else { + ParserMapInsnLoader(mods) + } + + val GTIRBConverter = GTIRBToIR(mods, lifter, cfg, mainAddress, mainName) + GTIRBConverter.createIR() + } + + /** Loads ELF data from the GTIRB input file. */ + def loadGTIRBReadELF(config: ILLoadingConfig): ReadELFData = { + val fIn = skipGTIRBMagic(config.inputFile) + val ir = IR.parseFrom(fIn) + if (ir.modules.length != 1) { + Logger.warn(s"GTIRB file ${config.inputFile} unexpectedly has ${ir.modules.length} modules") + } + + val gtirb = GTIRBResolver(ir.modules.head) + val gtirbRelfLoader = GTIRBReadELF(gtirb) + gtirbRelfLoader.getReadELFData(config.mainProcedureName) + } + + /** + * Loads ELF data from *both* .relf and .gts (if using GTIRB input). If both + * sources load successfully, compares them and warns on any differences. + */ + def loadReadELFWithGTIRB(fileName: String, config: ILLoadingConfig): (ReadELFData, Option[ReadELFData]) = { + val lexer = ReadELFLexer(CharStreams.fromFileName(fileName)) + val tokens = CommonTokenStream(lexer) + val parser = ReadELFParser(tokens) + parser.setErrorHandler(BailErrorStrategy()) + parser.setBuildParseTree(true) + + val relf = ReadELFLoader.visitSyms(parser.syms(), config) + + val gtirbRelf = if (config.inputFile.endsWith(".gts") || config.inputFile.endsWith(".gtirb")) { + val gtirbRelf = loadGTIRBReadELF(config) + GTIRBReadELF.checkReadELFCompatibility(gtirbRelf, relf) + Some(gtirbRelf) + } else { + None + } + + (relf, gtirbRelf) + } + + /** + * Loads ELF data from .relf. 
+ */ + def loadReadELF(fileName: String, config: ILLoadingConfig) = + loadReadELFWithGTIRB(fileName, config)._1 + + def emptySpecification(globals: Set[SpecGlobal]) = + Specification(Set(), globals, Map(), List(), List(), List(), Set()) + + def loadSpecification(filename: Option[String], program: Program, globals: Set[SpecGlobal]): Specification = { + filename match { + case Some(s) => + val specLexer = SpecificationsLexer(CharStreams.fromFileName(s)) + val specTokens = CommonTokenStream(specLexer) + val specParser = SpecificationsParser(specTokens) + specParser.setBuildParseTree(true) + val specLoader = SpecificationLoader(globals, program) + specLoader.visitSpecification(specParser.specification()) + case None => emptySpecification(globals) + } + } +} diff --git a/src/main/scala/ir/eval/InterpretBasilIR.scala b/src/main/scala/ir/eval/InterpretBasilIR.scala index aa7a1afc7..e6a931b97 100644 --- a/src/main/scala/ir/eval/InterpretBasilIR.scala +++ b/src/main/scala/ir/eval/InterpretBasilIR.scala @@ -1,9 +1,9 @@ package ir.eval import boogie.Scope import ir.* +import util.Logger import util.functional.* import util.functional.State.* -import util.{IRContext, Logger} import scala.collection.immutable diff --git a/src/main/scala/ir/eval/InterpretBreakpoints.scala b/src/main/scala/ir/eval/InterpretBreakpoints.scala index 70efcb98a..ad05fc4c6 100644 --- a/src/main/scala/ir/eval/InterpretBreakpoints.scala +++ b/src/main/scala/ir/eval/InterpretBreakpoints.scala @@ -1,9 +1,9 @@ package ir.eval import ir.* +import util.Logger import util.functional.* import util.functional.State.* -import util.{IRContext, Logger} import scala.collection.immutable diff --git a/src/main/scala/ir/eval/InterpretRLimit.scala b/src/main/scala/ir/eval/InterpretRLimit.scala index 3df7c793b..29d21adb7 100644 --- a/src/main/scala/ir/eval/InterpretRLimit.scala +++ b/src/main/scala/ir/eval/InterpretRLimit.scala @@ -1,7 +1,6 @@ package ir.eval -import ir.* -import util.IRContext +import ir.{IRContext, *} 
import util.functional.* import util.functional.State.* diff --git a/src/main/scala/ir/eval/InterpretTrace.scala b/src/main/scala/ir/eval/InterpretTrace.scala index de4a9e21b..33c0b0012 100644 --- a/src/main/scala/ir/eval/InterpretTrace.scala +++ b/src/main/scala/ir/eval/InterpretTrace.scala @@ -1,8 +1,7 @@ package ir.eval import boogie.Scope -import ir.* -import util.IRContext +import ir.{IRContext, *} import util.functional.* import util.functional.State.* diff --git a/src/main/scala/ir/parsing/Attrib.scala b/src/main/scala/ir/parsing/Attrib.scala index 5265ba3ba..f3f61ba43 100644 --- a/src/main/scala/ir/parsing/Attrib.scala +++ b/src/main/scala/ir/parsing/Attrib.scala @@ -214,7 +214,7 @@ case class SymbolTableInfo( } object SymbolTableInfo { - def from(e: util.IRContext) = { + def from(e: ir.IRContext) = { SymbolTableInfo(e.externalFunctions, e.globals, e.funcEntries, e.globalOffsets) } @@ -352,5 +352,4 @@ case object MemoryAttribData { } yield (strs) } yield (MemoryAttribData(name, address, size.toInt, readOnly, bytes)) } - } diff --git a/src/main/scala/ir/parsing/ParseBasilIL.scala b/src/main/scala/ir/parsing/ParseBasilIL.scala index be8ecc7f3..7b3f19c86 100644 --- a/src/main/scala/ir/parsing/ParseBasilIL.scala +++ b/src/main/scala/ir/parsing/ParseBasilIL.scala @@ -7,17 +7,17 @@ import java.io.{FileReader, Reader, StringReader} object ParseBasilIL { /** - * Combines the parsed declarations and the parsed DSL program into a [[util.IRContext]], + * Combines the parsed declarations and the parsed DSL program into a [[ir.IRContext]], * including resolving the DSL program into a Basil IR program. 
*/ def makeBasilIRContext(decls: Declarations, prog: ir.dsl.EventuallyProgram) = { - util.IRContext( + ir.IRContext( List(), decls.symtab.externalFunctions, decls.symtab.globals, decls.symtab.funcEntries, decls.symtab.globalOffsets, - util.IRLoading.emptySpecification(decls.symtab.globals), + ir.IRLoading.emptySpecification(decls.symtab.globals), prog.resolve ) } @@ -46,12 +46,12 @@ object ParseBasilIL { result } - def loadILFile(filePath: String): util.IRContext = { + def loadILFile(filePath: String): ir.IRContext = { val reader = new FileReader(filePath) loadILReader(reader) } - def loadILString(text: String): util.IRContext = { + def loadILString(text: String): ir.IRContext = { val reader = new StringReader(text) loadILReader(reader) } diff --git a/src/main/scala/ir/parsing/package.scala b/src/main/scala/ir/parsing/package.scala index 5bcba2310..d2ad8aa6e 100644 --- a/src/main/scala/ir/parsing/package.scala +++ b/src/main/scala/ir/parsing/package.scala @@ -70,7 +70,7 @@ package ir * The visitor returns an unresolved Basil DSL structure, [[ir.dsl.EventuallyProgram]]. * 6. In [[ir.parsing.ParseBasilIL.makeBasilIRContext]], the DSL structure is resolved into a * real Basil IR [[ir.Program]] and combined with the [[ir.parsing.Declarations]] - * to produce a [[util.IRContext]]. + * to produce a [[ir.IRContext]]. 
* */ package object parsing {} diff --git a/src/main/scala/ir/transforms/InlinePLTLaunchpad.scala b/src/main/scala/ir/transforms/InlinePLTLaunchpad.scala index 1cec16dd2..8523c2a03 100644 --- a/src/main/scala/ir/transforms/InlinePLTLaunchpad.scala +++ b/src/main/scala/ir/transforms/InlinePLTLaunchpad.scala @@ -1,22 +1,25 @@ package ir.transforms -import ir.{Procedure, Program} +import ir.Procedure -def inlinePLTLaunchpad(prog: Program) = { - prog.sortProceduresRPO() +val inlinePLTLaunchpad = Transform( + "InlinePLTLaunchpad", + (ctx, man) => { + ctx.program.sortProceduresRPO() - def candidate(p: Procedure): Boolean = - (p.blocks.size <= 4) - && p.calls.size == 1 - && p.calls.forall(_.isExternal.contains(true)) - && p.procName.startsWith("FUN") - && !p.calls.contains(p) + def candidate(p: Procedure): Boolean = + (p.blocks.size <= 4) + && p.calls.size == 1 + && p.calls.forall(_.isExternal.contains(true)) + && p.procName.startsWith("FUN") + && !p.calls.contains(p) - for (p <- prog.procedures.reverse.filter(candidate)) { - p.incomingCalls().foreach { call => - inlineCall(prog, call) + for (p <- ctx.program.procedures.reverse.filter(candidate)) { + p.incomingCalls().foreach { call => + inlineCall(ctx.program, call) + } } - } - - applyRPO(prog) -} + applyRPO(ctx.program) + man.ClobberAll + } +) diff --git a/src/main/scala/ir/transforms/LinuxAssertFail.scala b/src/main/scala/ir/transforms/LinuxAssertFail.scala index b8f41a394..8ee2ed84f 100644 --- a/src/main/scala/ir/transforms/LinuxAssertFail.scala +++ b/src/main/scala/ir/transforms/LinuxAssertFail.scala @@ -1,9 +1,8 @@ package ir.transforms -import ir.* import ir.cilvisitor.* import ir.eval.* -import util.IRContext +import ir.{IRContext, *} import util.functional.* def liftLinuxAssertFail(ctx: IRContext) = { diff --git a/src/main/scala/ir/transforms/PrepareForTranslation.scala b/src/main/scala/ir/transforms/PrepareForTranslation.scala new file mode 100644 index 000000000..3ea7b6980 --- /dev/null +++ 
b/src/main/scala/ir/transforms/PrepareForTranslation.scala @@ -0,0 +1,78 @@ +package ir.transforms + +import ir.cilvisitor.* +import ir.{Block, invariant} +import util.{BASILConfig, MemoryRegionsMode} + +import scala.collection.mutable + +// run iff arg.isEmpty || (arg.get.memoryRegions == MemoryRegionsMode.Disabled) +val determineRelevantMemory = Transform( + "DetermineRelevantMemory", + (ctx, man) => { + ctx.program.determineRelevantMemory(ctx.globalOffsets) + man.ClobberAll + } +) + +// run iff arg +val stackSubstitution = Transform( + "StackSubstitution", + (ctx, man) => { + visit_prog(StackSubstituter(), ctx.program) + man.ClobberAll + } +) + +val setModifies = Transform( + "SetModifies", + (ctx, man) => { + val specModifies = ctx.specification.subroutines.map(s => s.name -> s.modifies).toMap + ctx.program.setModifies(specModifies) + man.ClobberAll + } +) + +def getRenameBoogieKeywordsTransform(boogieReserved: Set[String]): Transform = + Transform( + "RenameBoogieKeywords", + (ctx, man) => { + visit_prog(BoogieReservedRenamer(boogieReserved), ctx.program) + man.ClobberAll + } + ) + +/** Cull unnecessary information that does not need to be included in the translation, and infer stack regions, and + * add in modifies from the spec. 
+ */ +def getPrepareForTranslationTransform(config: BASILConfig, boogieReserved: Set[String]): Transform = TransformBatch( + "PrepareForTranslation", + List( + determineRelevantMemory.when( + config.staticAnalysis.isEmpty || (config.staticAnalysis.get.memoryRegions == MemoryRegionsMode.Disabled) + ), + getStripUnreachableFunctionsTransform(config.loading.procedureTrimDepth), + stackSubstitution.when( + !config.memoryTransform && + (config.staticAnalysis.isEmpty || (config.staticAnalysis.get.memoryRegions == MemoryRegionsMode.Disabled)) + ), + setModifies, + getRenameBoogieKeywordsTransform(boogieReserved: Set[String]) + ), + postRunChecks = ctx => { + assert(invariant.singleCallBlockEnd(ctx.program)) + // check all blocks with an atomic section exist within the same procedure + val visited = mutable.Set[Block]() + for (p <- ctx.program.procedures) { + for (b <- p.blocks) { + if (!visited.contains(b)) { + if (b.atomicSection.isDefined) { + b.atomicSection.get.getBlocks.foreach { a => assert(a.parent == p) } + visited.addAll(b.atomicSection.get.getBlocks) + } + visited.addOne(b) + } + } + } + } +) diff --git a/src/main/scala/ir/transforms/ProcedureParameters.scala b/src/main/scala/ir/transforms/ProcedureParameters.scala index 552d4eac6..0ad90f788 100644 --- a/src/main/scala/ir/transforms/ProcedureParameters.scala +++ b/src/main/scala/ir/transforms/ProcedureParameters.scala @@ -111,7 +111,7 @@ object DefinedOnAllPaths { } } -def liftProcedureCallAbstraction(ctx: util.IRContext): util.IRContext = { +def liftProcedureCallAbstraction(ctx: ir.IRContext): ir.IRContext = { transforms.clearParams(ctx.program) diff --git a/src/main/scala/ir/transforms/ReplaceReturn.scala b/src/main/scala/ir/transforms/ReplaceReturn.scala index 82b7b5064..2b5fb9563 100644 --- a/src/main/scala/ir/transforms/ReplaceReturn.scala +++ b/src/main/scala/ir/transforms/ReplaceReturn.scala @@ -141,3 +141,12 @@ def establishProcedureDiamondForm(program: Program, doSimplify: Boolean = false) 
cilvisitor.visit_prog(ConvertSingleReturn(), program) debugAssert(ir.invariant.programDiamondForm(program)) } + +def getEstablishProcedureDiamondFormTransform(doSimplify: Boolean): Transform = + Transform( + "EstablishProcedureDiamondForm", + (ctx, man) => { + establishProcedureDiamondForm(ctx.program, doSimplify) + man.ClobberAll + } + ) diff --git a/src/main/scala/ir/transforms/Simp.scala b/src/main/scala/ir/transforms/Simp.scala index b6aa54d46..df0cfbee2 100644 --- a/src/main/scala/ir/transforms/Simp.scala +++ b/src/main/scala/ir/transforms/Simp.scala @@ -1,4 +1,5 @@ package ir.transforms + import ir.* import ir.cilvisitor.* import ir.eval.{AlgebraicSimplifications, AssumeConditionSimplifications, simplifyExprFixpoint} @@ -867,6 +868,22 @@ def coalesceBlocks(p: Program): Boolean = { didAny } +val coalesceBlocksOnce = Transform( + "CoalesceBlocksOnce", + (ctx, man) => { + coalesceBlocks(ctx.program) + man.ClobberAll + } +) + +val coalesceBlocksFixpoint = Transform( + "CoalesceBlocksFixpoint", + (ctx, man) => { + while (coalesceBlocks(ctx.program)) {} + man.ClobberAll + } +) + def removeDeadInParams(p: Program): Boolean = { var modified = false debugAssert(invariant.correctCalls(p)) @@ -1178,6 +1195,14 @@ def applyRPO(p: Program) = { } } +val applyRpoTransform = Transform( + "ApplyRPO", + (ctx, man) => { + applyRPO(man.program) + man.ClobberAll + } +) + object getProcFrame { class GetProcFrame(frames: Procedure => Set[Memory]) extends CILVisitor { var modifies = Set[Memory]() @@ -1957,7 +1982,7 @@ class DefinitelyExits(knownExit: Set[Procedure]) extends ProcedureSummaryGenerat } } -def findDefinitelyExits(p: Program) = { +def findDefinitelyExits(p: Program): ProcReturnInfo = { val exit = p.procedures.filter(p => p.procName == "exit").toSet val dom = DefinitelyExits(exit) val ldom = ProcExitsDomain(x => false) @@ -1973,6 +1998,59 @@ def findDefinitelyExits(p: Program) = { ) } +val replaceJumpsInNonReturningProcs = Transform( + "ReplaceJumpsInNonReturningProcs", + 
(ctx, man) => { + val nonReturning = findDefinitelyExits(ctx.program) + ctx.program.mainProcedure.foreach { + case d: DirectCall if nonReturning.nonreturning.contains(d.target) => d.parent.replaceJump(Return()) + case _ => + } + man.ClobberAll + } +) + +val removeExternalFunctionReferences = Transform( + "RemoveExternalFunctionReferences", + (ctx, man) => { + val externalNames = ctx.externalFunctions.map(_.name) + val unqualifiedNames = externalNames.filter(_.contains('@')).map(_.split('@')(0)) + removeBodyOfExternal(externalNames ++ unqualifiedNames)(ctx.program) + for (p <- ctx.program.procedures) { + p.isExternal = Some( + ctx.externalFunctions.exists(e => e.name == p.procName || p.address.contains(e.offset)) || p.isExternal + .getOrElse(false) + ) + } + man.ClobberAll + } +) + +def getDoCleanupTransform(doSimplify: Boolean): Transform = TransformBatch( + "DoCleanup", + List( + makeProcEntriesNonLoops, + // useful for ReplaceReturns + // (pushes single block with `Unreachable` into its predecessor) + coalesceBlocksFixpoint, + applyRpoTransform, + replaceJumpsInNonReturningProcs, + getEstablishProcedureDiamondFormTransform(doSimplify), + removeExternalFunctionReferences + ), + notice = "Removing external function calls", // fixme: is this all the cleanup is doing? 
+ postRunChecks = ctx => { + assert(invariant.singleCallBlockEnd(ctx.program)) + assert(invariant.cfgCorrect(ctx.program)) + assert(invariant.blocksUniqueToEachProcedure(ctx.program)) + assert(invariant.procEntryNoIncoming(ctx.program)) + } +) + +// these are called a lot so it's useful to create them here rather than generating many copies on the fly +lazy val doCleanupWithSimplify = getDoCleanupTransform(true) +lazy val doCleanupWithoutSimplify = getDoCleanupTransform(false) + class Simplify(val res: Boolean => Variable => Option[Expr], val initialBlock: Block = null) extends CILVisitor { var madeAnyChange = false @@ -2196,14 +2274,20 @@ def removeTriviallyDeadBranches(p: Program, removeAllUnreachableBlocks: Boolean dead.nonEmpty } -// ensure procedure entry has no incoming jumps, if it does replace with new -// block jumping to the old procedure entry -def makeProcEntryNonLoop(p: Procedure) = { - if (p.entryBlock.exists(_.prevBlocks.nonEmpty)) { - val nb = Block(p.name + "_entry") - p.addBlock(nb) - val eb = p.entryBlock.get - nb.replaceJump(GoTo(eb)) - p.entryBlock = nb +val makeProcEntriesNonLoops = Transform( + "MakeProcEntriesNonLoops", + (ctx, man) => { + ctx.program.procedures.foreach(p => { + // ensure procedure entry has no incoming jumps, if it does replace with new + // block jumping to the old procedure entry + if (p.entryBlock.exists(_.prevBlocks.nonEmpty)) { + val nb = Block(p.name + "_entry") + p.addBlock(nb) + val eb = p.entryBlock.get + nb.replaceJump(GoTo(eb)) + p.entryBlock = nb + } + }) + man.ClobberAll } -} +) diff --git a/src/main/scala/ir/transforms/SimplifyPipeline.scala b/src/main/scala/ir/transforms/SimplifyPipeline.scala new file mode 100644 index 000000000..0a23d884b --- /dev/null +++ b/src/main/scala/ir/transforms/SimplifyPipeline.scala @@ -0,0 +1,166 @@ +package ir.transforms + +import analysis.{AnalysisManager, Interval as _, *} +import ir.* +import translating.* +import translating.PrettyPrinter.* +import 
util.{AnalysisResultDotLogger, DebugDumpIRLogger, Logger, PerformanceTimer, StaticAnalysisConfig} + +import java.io.{BufferedWriter, File, FileWriter} + +def doSimplify(ctx: IRContext, config: Option[StaticAnalysisConfig]): Unit = { + // writeToFile(dotBlockGraph(program, program.filter(_.isInstanceOf[Block]).map(b => b -> b.toString).toMap), s"blockgraph-before-simp.dot") + Logger.info("[!] Running Simplify") + val timer = PerformanceTimer("Simplify") + val program = ctx.program + + val foundLoops = LoopDetector.identify_loops(program) + val newLoops = foundLoops.reducibleTransformIR() + newLoops.updateIrWithLoops() + + for (p <- program.procedures) { + p.normaliseBlockNames() + } + + ctx.program.sortProceduresRPO() + + transforms.liftSVComp(ctx.program) + + config.foreach { + _.dumpILToPath.foreach { s => + DebugDumpIRLogger.writeToFile(File(s"${s}_il-before-simp.il"), pp_prog(program)) + } + } + + transforms.applyRPO(program) + + // example of printing a simple analysis + + transforms.removeEmptyBlocks(program) + transforms.coalesceBlocks(program) + transforms.removeEmptyBlocks(program) + + // transforms.coalesceBlocksCrossBranchDependency(program) + config.foreach { + _.analysisDotPath.foreach { s => + DebugDumpIRLogger.writeToFile(File(s"${s}_blockgraph-before-dsa.dot"), dotBlockGraph(program.mainProcedure)) + } + } + + Logger.info("[!] Simplify :: DynamicSingleAssignment") + config.foreach { + _.dumpILToPath.foreach { s => + DebugDumpIRLogger.writeToFile(File(s"${s}_il-before-dsa.il"), pp_prog(program)) + } + } + + transforms.OnePassDSA().applyTransform(program) + + // fixme: this used to be a plain function but now we have to supply an analysis manager! 
+ transforms.inlinePLTLaunchpad(ctx, AnalysisManager(ctx.program)) + + transforms.removeEmptyBlocks(program) + + config.foreach { + _.analysisDotPath.foreach { s => + AnalysisResultDotLogger.writeToFile( + File(s"${s}_blockgraph-after-dsa.dot"), + dotBlockGraph( + program, + (program.collect { case b: Block => + b -> pp_block(b) + }).toMap + ) + ) + } + } + config.foreach { + _.dumpILToPath.foreach { s => + DebugDumpIRLogger.writeToFile(File(s"${s}_il-after-dsa.il"), pp_prog(program)) + } + } + + if (ir.eval.SimplifyValidation.validate) { + Logger.info("DSA no uninitialised") + assert(invariant.allVariablesAssignedIndex(program)) + // Logger.info("Live vars difftest") + // val tipLiveVars : Map[CFGPosition, Set[Variable]] = analysis.IntraLiveVarsAnalysis(program).analyze() + // assert(program.procedures.forall(transforms.difftestLiveVars(_, tipLiveVars))) + + Logger.info("DSA Check") + val x = program.procedures.forall(transforms.rdDSAProperty) + assert(x) + Logger.info("DSA Check passed") + assert(invariant.singleCallBlockEnd(program)) + assert(invariant.cfgCorrect(program)) + assert(invariant.blocksUniqueToEachProcedure(program)) + } + + config.foreach { + _.dumpILToPath.foreach { s => + DebugDumpIRLogger.writeToFile(File(s"${s}_il-before-copyprop.il"), pp_prog(program)) + } + } + + // brute force run the analysis twice because it cleans up more stuff + // assert(program.procedures.forall(transforms.rdDSAProperty)) + config.foreach { + _.analysisDotPath.foreach { s => + AnalysisResultDotLogger.writeToFile( + File(s"${s}_blockgraph-before-copyprop.dot"), + dotBlockGraph(program.mainProcedure) + ) + } + } + Logger.info("Copyprop Start") + transforms.copyPropParamFixedPoint(program, ctx.globalOffsets) + + transforms.fixupGuards(program) + transforms.removeDuplicateGuard(program) + config.foreach { + _.analysisDotPath.foreach { s => + AnalysisResultDotLogger.writeToFile(File(s"${s}_blockgraph-after-simp.dot"), dotBlockGraph(program.mainProcedure)) + } + } + + 
transforms.liftLinuxAssertFail(ctx) + + // assert(program.procedures.forall(transforms.rdDSAProperty)) + + assert(invariant.blockUniqueLabels(program)) + Logger.info(s"CopyProp ${timer.checkPoint("Simplify")} ms ") + + config.foreach { + _.dumpILToPath.foreach { s => + DebugDumpIRLogger.writeToFile(File(s"${s}_il-after-copyprop.il"), pp_prog(program)) + } + } + + // val x = program.procedures.forall(transforms.rdDSAProperty) + // assert(x) + if (ir.eval.SimplifyValidation.validate) { + Logger.info("DSA Check (after transform)") + val x = program.procedures.forall(transforms.rdDSAProperty) + assert(x) + Logger.info("DSA Check succeeded") + } + // run this after cond recovery because sign bit calculations often need high bits + // which go away in high level conss + config.foreach { + _.dumpILToPath.foreach { s => + DebugDumpIRLogger.writeToFile(File(s"${s}_il-after-slices.il"), pp_prog(program)) + } + } + + // re-apply dsa + // transforms.OnePassDSA().applyTransform(program) + + if (ir.eval.SimplifyValidation.validate) { + Logger.info("[!] Simplify :: Writing simplification validation") + val w = BufferedWriter(FileWriter("rewrites.smt2")) + ir.eval.SimplifyValidation.makeValidation(w) + w.close() + } + + Logger.info("[!] 
Simplify :: finished") +} diff --git a/src/main/scala/ir/transforms/StripUnreachableFunctions.scala b/src/main/scala/ir/transforms/StripUnreachableFunctions.scala index f4a94640f..58e314993 100644 --- a/src/main/scala/ir/transforms/StripUnreachableFunctions.scala +++ b/src/main/scala/ir/transforms/StripUnreachableFunctions.scala @@ -1,11 +1,13 @@ package ir.transforms + import ir.* +import util.Logger import util.assertion.* import collection.mutable // This shouldn't be run before indirect calls are resolved -def stripUnreachableFunctions(p: Program, depth: Int = Int.MaxValue): Unit = { +def stripUnreachableFunctions(p: Program, depth: Int): Unit = { val procedureCalleeNames = p.procedures.map(f => f -> f.calls).toMap val toVisit: mutable.LinkedHashSet[(Int, Procedure)] = mutable.LinkedHashSet((0, p.mainProcedure)) @@ -49,3 +51,33 @@ def stripUnreachableFunctions(p: Program, depth: Int = Int.MaxValue): Unit = { debugAssert(invariant.cfgCorrect(p)) } + +def getStripUnreachableFunctionsTransform(depth: Int): Transform = + Transform( + "StripUnreachableFunctions", + (ctx, man) => { + val before = ctx.program.procedures.size + stripUnreachableFunctions(ctx.program, depth) + Logger.info( + s"[!] Removed ${before - ctx.program.procedures.size} functions (${ctx.program.procedures.size} remaining)" + ) + + /* Fixme: Since refactoring RunUtils, the following code runs when this transform is invoked by the + loadAndTranslate function, whereas it used to only run when invoked by the prepareForTranslation function. I don't + know if this is problematic. 
*/ + val dupProcNames = ctx.program.procedures.groupBy(_.name).filter((_, p) => p.size > 1).toList.flatMap(_(1)) + assert(dupProcNames.isEmpty) + + ctx.program.procedures.foreach(p => + p.blocks.foreach(b => { + b.jump match { + case GoTo(targs, _) if targs.isEmpty => + Logger.warn(s"block ${b.label} in subroutine ${p.name} has no outgoing edges") + case _ => () + } + }) + ) + man.ClobberAll + }, + notice = "Stripping Unreachable" + ) diff --git a/src/main/scala/ir/transforms/Transform.scala b/src/main/scala/ir/transforms/Transform.scala new file mode 100644 index 000000000..9dcf151b4 --- /dev/null +++ b/src/main/scala/ir/transforms/Transform.scala @@ -0,0 +1,122 @@ +package ir.transforms + +import analysis.AnalysisManager +import ir.{IRContext, dotBlockGraph} +import translating.PrettyPrinter.pp_prog +import util.{DebugDumpIRLogger, Logger, PerformanceTimer} + +import java.io.File + +// transforms accept Log instances which specify what kind of logs to dump +trait Log { + def dump(ctx: IRContext, transformName: String): Unit +} + +// dumps a blockgraph log +case class BlockgraphLog(filenamePrefix: String) extends Log { + def dump(ctx: IRContext, transformName: String): Unit = + DebugDumpIRLogger.writeToFile( + File(s"${filenamePrefix}_blockgraph-${transformName}.dot"), + dotBlockGraph(ctx.program.mainProcedure) + ) +} + +// dumps an IR log +case class IrLog(filenamePrefix: String) extends Log { + def dump(ctx: IRContext, transformName: String): Unit = + DebugDumpIRLogger.writeToFile(File(s"${filenamePrefix}_il-${transformName}.il"), pp_prog(ctx.program)) +} + +/** This case class defines the interface and configurable parameters for transform passes that modify the basil ir + * context in-place. It is designed to be read-only to avoid inter-dependencies between uses of the same transform + * instance. To configure the behaviour of a transform (e.g. 
based on arguments to the basil program), we supply + * a set of methods that each return a copy of the transform with a particular behaviour tweaked, such as which logs + * to dump before and after the transform runs, whether to log performance, or whether to disable the transform. + * + * @param name Human-readable name of the transform; it is used in the names of generated log files. + * @param implementation The function to invoke when this transform is called. + * @param notice Optional message to log upon invocation of this transform (for important transforms). + * @param postRunChecks Optional code to run after performance has been measured but before any logs are dumped. + */ +case class Transform( + name: String, + implementation: (ctx: IRContext, man: AnalysisManager) => man.Invalidation, + notice: String = "", + postRunChecks: IRContext => Unit = _ => () +) { + /* The following fields are configurable via the below methods, which return copies of the transform. + Every transform can be assumed to have these defaults unless it was created via one of these methods. */ + + // set to false to make the apply method do nothing + private var enabled: Boolean = true + // set to true to have the performance of this transform be measured and dumped with a PerformanceTimer + private var logPerformance: Boolean = false + // a set of log types to dump before and after running the transform + private var logsToDump: Set[Log] = Set.empty + + def when(cond: Boolean): Transform = { + val cp = copy() + cp.enabled = cond + return cp + } + + def unless(cond: Boolean): Transform = { + val cp = copy() + cp.enabled = !cond + return cp + } + + def timeIf(cond: Boolean): Transform = { + val cp = copy() + cp.logPerformance = cond + return cp + } + + def withLogs(logs: Set[Log]): Transform = { + val cp = copy() + cp.logsToDump = logs + return cp + } + + /** Applies this transform to the given ir context. 
This is effectively a wrapper for the `implementation` function + * that handles all of our configurable behaviour and the invalidation of analysis results. + * + * @param ctx The ir context to transform. + * @param man The analysis manager through which to access and invalidate analysis results. + */ + def apply(ctx: IRContext, man: AnalysisManager): Unit = { + if (!enabled) return + if (notice != "") Logger.info(s"[!] ${notice}") + if (man.program ne ctx.program) { + // the program we are transforming should be the same one for which the analysis results were produced + throw new RuntimeException( + s"Transform '$name' was passed an AnalysisManager of an IR Program with a different " + + s"reference value than the program being transformed." + ) + } + logsToDump.foreach(_.dump(ctx, s"before-$name")) + val timer: PerformanceTimer = if logPerformance then PerformanceTimer(name) else null + val invalidation = implementation(ctx, man) + if logPerformance then timer.checkPoint("delta") + postRunChecks(ctx) + logsToDump.foreach(_.dump(ctx, s"after-$name")) + man.invalidate(invalidation) + } +} + +// helper method for constructing transforms that are sequences of other transforms +// we prefer this over invoking transforms in the implementations of other transforms +def TransformBatch( + name: String, + transforms: List[Transform], + notice: String = "", + postRunChecks: IRContext => Unit = _ => () +): Transform = Transform( + name, + (ctx, man) => { + transforms.foreach(_(ctx, man)) + man.PreserveAll + }, + notice, + postRunChecks +) diff --git a/src/main/scala/translating/PrettyPrinter.scala b/src/main/scala/translating/PrettyPrinter.scala index 5853b3adb..4668b2e91 100644 --- a/src/main/scala/translating/PrettyPrinter.scala +++ b/src/main/scala/translating/PrettyPrinter.scala @@ -11,7 +11,7 @@ private val localSigils = false object PrettyPrinter { - type PrettyPrintable = Program | Procedure | Statement | Jump | Command | Block | Expr | util.IRContext + type 
PrettyPrintable = Program | Procedure | Statement | Jump | Command | Block | Expr | IRContext extension (b: BigInt) { def pprint: String = "0x%x".format(b) @@ -24,10 +24,10 @@ object PrettyPrinter { case e: Block => pp_block(e) case e: Procedure => pp_proc(e) case e: Program => pp_prog(e) - case e: util.IRContext => pp_irctx(e) + case e: IRContext => pp_irctx(e) } - def pp_irctx(e: util.IRContext) = BasilIRPrettyPrinter().vcontext(e).toString + def pp_irctx(e: IRContext) = BasilIRPrettyPrinter().vcontext(e).toString def pp_expr(e: Expr) = BasilIRPrettyPrinter()(e) def pp_stmt(s: Statement) = BasilIRPrettyPrinter()(s) def pp_cmd(c: Command) = c match { @@ -230,7 +230,7 @@ class BasilIRPrettyPrinter( }.toSet } - def vcontext(i: util.IRContext) = { + def vcontext(i: IRContext) = { val prog = vprog(i.program) import ir.parsing.Attrib import ir.parsing.MemoryAttribData diff --git a/src/main/scala/util/BASILConfig.scala b/src/main/scala/util/BASILConfig.scala index ab024a836..49c7d0d8d 100644 --- a/src/main/scala/util/BASILConfig.scala +++ b/src/main/scala/util/BASILConfig.scala @@ -1,5 +1,7 @@ package util +import ir.{FrontendMode, IRContext} + enum ProcRelyVersion { case Function, IfCommandContradiction } diff --git a/src/main/scala/util/RunUtils.scala b/src/main/scala/util/RunUtils.scala index eca07cdee..16645f19d 100644 --- a/src/main/scala/util/RunUtils.scala +++ b/src/main/scala/util/RunUtils.scala @@ -1,24 +1,18 @@ package util -import Parsers.* import analysis.data_structure_analysis.* -import analysis.{Interval as _, *} -import bap.* +import analysis.{AnalysisManager, Interval as _, *} import boogie.* -import com.grammatech.gtirb.proto.IR.IR -import gtirb.{GTIRBReadELF, GTIRBResolver} import ir.* import ir.dsl.given import ir.eval.* -import ir.transforms.MemoryTransform -import org.antlr.v4.runtime.{BailErrorStrategy, CharStreams, CommonTokenStream} -import specification.* +import ir.transforms.* import translating.* import translating.PrettyPrinter.* import 
util.LogLevel.INFO import util.{DebugDumpIRLogger, Logger} -import java.io.{BufferedWriter, File, FileInputStream, FileWriter, PrintWriter} +import java.io.{BufferedWriter, File, FileWriter, PrintWriter} import java.nio.file.{Files, Paths} import scala.collection.mutable import scala.collection.mutable.ArrayBuffer @@ -29,55 +23,6 @@ import cilvisitor.* /** This file contains the main program execution. See RunUtils.loadAndTranslate for the high-level process. */ -/** Stores the IR Program loaded from the binary and ELF tables, which is modified during analysis and program - * transformation. - */ -case class IRContext( - symbols: List[ELFSymbol], - externalFunctions: Set[ExternalFunction], - globals: Set[SpecGlobal], - funcEntries: Set[FuncEntry], - globalOffsets: Map[BigInt, BigInt], - specification: Specification, - program: Program // internally mutable -) - -enum FrontendMode { - case Bap - case Gtirb - case Basil -} - -/** Stores the results of the static analyses. - */ -case class StaticAnalysisContext( - intraProcConstProp: Map[CFGPosition, Map[Variable, FlatElement[BitVecLiteral]]], - interProcConstProp: Map[CFGPosition, Map[Variable, FlatElement[BitVecLiteral]]], - memoryRegionResult: Map[CFGPosition, ((Set[StackRegion], Set[Variable]), Set[HeapRegion])], - vsaResult: Map[CFGPosition, LiftedElement[Map[Variable | MemoryRegion, Set[Value]]]], - interLiveVarsResults: Map[CFGPosition, Map[Variable, TwoElement]], - paramResults: Map[Procedure, Set[Variable]], - steensgaardResults: Map[RegisterWrapperEqualSets, Set[RegisterWrapperEqualSets | MemoryRegion]], - mmmResults: MemoryModelMap, - reachingDefs: Map[CFGPosition, (Map[Variable, Set[Assign]], Map[Variable, Set[Assign]])], - regionInjector: Option[RegionInjector], - symbolicAddresses: Map[CFGPosition, Map[SymbolicAddress, TwoElement]], - localDSA: Map[Procedure, Graph], - bottomUpDSA: Map[Procedure, Graph], - topDownDSA: Map[Procedure, Graph], - writesToResult: Map[Procedure, Set[GlobalVar]], - ssaResults: 
Map[CFGPosition, (Map[Variable, FlatElement[Int]], Map[Variable, FlatElement[Int]])] -) - -case class DSAContext( - sva: Map[Procedure, SymValues[OSet]], - constraints: Map[Procedure, Set[Constraint]], - local: Map[Procedure, IntervalGraph], - bottomUp: Map[Procedure, IntervalGraph], - topDown: Map[Procedure, IntervalGraph], - globals: Map[IntervalNode, IntervalNode] -) - /** Results of the main program execution. */ case class BASILResult( @@ -87,656 +32,6 @@ case class BASILResult( boogie: ArrayBuffer[BProgram] ) -/** Tools for loading the IR program into an IRContext. - */ -object IRLoading { - - /** Create a context from just an IR program. - */ - def load(p: Program): IRContext = { - IRContext( - List.empty, - Set.empty, - Set.empty, - Set.empty, - Map.empty, - IRLoading.loadSpecification(None, p, Set.empty), - p - ) - } - - /** Load a program from files using the provided configuration. - */ - def load(q: ILLoadingConfig): IRContext = { - - val mode = q.frontendMode - if (q.inputFile.endsWith(".gtirb") && !q.gtirbLiftOffline) { - throw IllegalArgumentException(".gtirb input requires --lifter") - } - - val (mainAddress, makeContext) = q.relfFile match { - case Some(relf) => { - - // allow loading elf from inputFile if using GTIRB mode. - val relfData = if (relf == q.inputFile && mode == FrontendMode.Gtirb) { - Logger.info("[!] Using ELF data from GTIRB: " + q.inputFile) - IRLoading.loadGTIRBReadELF(q) - } else { - Logger.info("[!] 
Using ELF data from relf: " + relf) - IRLoading.loadReadELF(relf, q) - } - - val ReadELFData(symbols, externalFunctions, globals, funcEntries, globalOffsets, mainAddress) = relfData - - def continuation(ctx: IRContext) = - val specification = IRLoading.loadSpecification(q.specFile, ctx.program, globals) - IRContext(symbols, externalFunctions, globals, funcEntries, globalOffsets, specification, ctx.program) - - (Some(mainAddress), continuation) - } - case None if mode == FrontendMode.Gtirb => { - Logger.warn( - "RELF input not provided, this is not recommended! To provide a RELF input, specify --relf or --gts-relf." - ) - (None, (x: IRContext) => x) - } - case None => { - (None, (x: IRContext) => x) - } - } - - val program: IRContext = (mode, mainAddress) match { - case (FrontendMode.Gtirb, _) => - IRLoading.load(loadGTIRB(q.inputFile, mainAddress, q.gtirbLiftOffline, Some(q.mainProcedureName))) - case (FrontendMode.Basil, _) => ir.parsing.ParseBasilIL.loadILFile(q.inputFile) - case (FrontendMode.Bap, None) => throw Exception("relf is required when using BAP input") - case (FrontendMode.Bap, Some(mainAddress)) => { - val bapProgram = loadBAP(q.inputFile) - IRLoading.load(BAPToIR(bapProgram, mainAddress).translate) - } - } - - val ctx = makeContext(program) - mode match { - case FrontendMode.Basil => { - ctx.program.procedures.foreach(_.updateBlockSuffix()) - Logger.info("[!] 
Disabling PC tracking transforms due to IL input") - } - case _ => { - ir.transforms.PCTracking.applyPCTracking(q.pcTracking, ctx.program) - ctx.program.procedures.foreach(_.normaliseBlockNames()) - ir.transforms.clearParams(ctx.program) - } - } - ctx - } - - def loadBAP(fileName: String): BAPProgram = { - val ADTLexer = BAP_ADTLexer(CharStreams.fromFileName(fileName)) - val tokens = CommonTokenStream(ADTLexer) - val parser = BAP_ADTParser(tokens) - - parser.setBuildParseTree(true) - - BAPLoader().visitProject(parser.project()) - } - - def skipGTIRBMagic(fileName: String): FileInputStream = { - val fIn = FileInputStream(fileName) - (0 to 7).map(_ => fIn.read()).toList match { - case List('G', 'T', 'I', 'R', 'B', _, _, _) => fIn - case o if fileName.endsWith(".gtirb") => { - fIn.close() - throw Exception(s"Did not find gtirb magic at start of file: $o") - } - case _ => { - fIn.close() - FileInputStream(fileName) - } - } - } - - def loadGTIRB( - fileName: String, - mainAddress: Option[BigInt], - gtirbLiftOffline: Boolean, - mainName: Option[String] = None - ): Program = { - val fIn = skipGTIRBMagic(fileName) - val ir = IR.parseFrom(fIn) - val mods = ir.modules - val cfg = ir.cfg.get - - val lifter = - if (gtirbLiftOffline) then OfflineLifterInsnLoader(mods) - else { - ParserMapInsnLoader(mods) - } - - val GTIRBConverter = GTIRBToIR(mods, lifter, cfg, mainAddress, mainName) - GTIRBConverter.createIR() - } - - /** Loads ELF data from the GTIRB input file. */ - def loadGTIRBReadELF(config: ILLoadingConfig): ReadELFData = { - val fIn = skipGTIRBMagic(config.inputFile) - val ir = IR.parseFrom(fIn) - if (ir.modules.length != 1) { - Logger.warn(s"GTIRB file ${config.inputFile} unexpectedly has ${ir.modules.length} modules") - } - - val gtirb = GTIRBResolver(ir.modules.head) - val gtirbRelfLoader = GTIRBReadELF(gtirb) - gtirbRelfLoader.getReadELFData(config.mainProcedureName) - } - - /** - * Loads ELF data from *both* .relf and .gts (if using GTIRB input). 
If both - * sources load successfully, compares them and warns on any differences. - */ - def loadReadELFWithGTIRB(fileName: String, config: ILLoadingConfig): (ReadELFData, Option[ReadELFData]) = { - val lexer = ReadELFLexer(CharStreams.fromFileName(fileName)) - val tokens = CommonTokenStream(lexer) - val parser = ReadELFParser(tokens) - parser.setErrorHandler(BailErrorStrategy()) - parser.setBuildParseTree(true) - - val relf = ReadELFLoader.visitSyms(parser.syms(), config) - - val gtirbRelf = if (config.inputFile.endsWith(".gts") || config.inputFile.endsWith(".gtirb")) { - val gtirbRelf = loadGTIRBReadELF(config) - GTIRBReadELF.checkReadELFCompatibility(gtirbRelf, relf) - Some(gtirbRelf) - } else { - None - } - - (relf, gtirbRelf) - } - - /** - * Loads ELF data from .relf. - */ - def loadReadELF(fileName: String, config: ILLoadingConfig) = - loadReadELFWithGTIRB(fileName, config)._1 - - def emptySpecification(globals: Set[SpecGlobal]) = - Specification(Set(), globals, Map(), List(), List(), List(), Set()) - - def loadSpecification(filename: Option[String], program: Program, globals: Set[SpecGlobal]): Specification = { - filename match { - case Some(s) => - val specLexer = SpecificationsLexer(CharStreams.fromFileName(s)) - val specTokens = CommonTokenStream(specLexer) - val specParser = SpecificationsParser(specTokens) - specParser.setBuildParseTree(true) - val specLoader = SpecificationLoader(globals, program) - specLoader.visitSpecification(specParser.specification()) - case None => emptySpecification(globals) - } - } -} - -/** Methods related to transforming the IR `Program` in-place. - * - * These operate over the IRContext, and possibly use static analysis results. - */ -object IRTransform { - val boogieReserved: Set[String] = Set("free") - - /** Initial cleanup before analysis. - */ - def doCleanup(ctx: IRContext, doSimplify: Boolean = false): IRContext = { - Logger.info("[!] Removing external function calls") - // Remove external function references (e.g. 
@printf) - val externalNames = ctx.externalFunctions.map(e => e.name) - val externalNamesLibRemoved = mutable.Set[String]() - externalNamesLibRemoved.addAll(externalNames) - - for (e <- externalNames) { - if (e.contains('@')) { - externalNamesLibRemoved.add(e.split('@')(0)) - } - } - - ctx.program.procedures.foreach(ir.transforms.makeProcEntryNonLoop) - - // useful for ReplaceReturns - // (pushes single block with `Unreachable` into its predecessor) - while (transforms.coalesceBlocks(ctx.program)) {} - - transforms.applyRPO(ctx.program) - val nonReturning = transforms.findDefinitelyExits(ctx.program) - ctx.program.mainProcedure.foreach { - case d: DirectCall if nonReturning.nonreturning.contains(d.target) => d.parent.replaceJump(Return()) - case _ => - } - - transforms.establishProcedureDiamondForm(ctx.program, doSimplify) - - ir.transforms.removeBodyOfExternal(externalNamesLibRemoved.toSet)(ctx.program) - for (p <- ctx.program.procedures) { - p.isExternal = Some( - ctx.externalFunctions.exists(e => e.name == p.procName || p.address.contains(e.offset)) || p.isExternal - .getOrElse(false) - ) - } - - assert(invariant.singleCallBlockEnd(ctx.program)) - assert(invariant.cfgCorrect(ctx.program)) - assert(invariant.blocksUniqueToEachProcedure(ctx.program)) - assert(invariant.procEntryNoIncoming(ctx.program)) - ctx - } - - /** Cull unneccessary information that does not need to be included in the translation, and infer stack regions, and - * add in modifies from the spec. - */ - def prepareForTranslation(config: BASILConfig, ctx: IRContext): Unit = { - if (config.staticAnalysis.isEmpty || (config.staticAnalysis.get.memoryRegions == MemoryRegionsMode.Disabled)) { - ctx.program.determineRelevantMemory(ctx.globalOffsets) - } - - Logger.info("[!] Stripping unreachable") - val before = ctx.program.procedures.size - transforms.stripUnreachableFunctions(ctx.program, config.loading.procedureTrimDepth) - Logger.info( - s"[!] 
Removed ${before - ctx.program.procedures.size} functions (${ctx.program.procedures.size} remaining)" - ) - val dupProcNames = ctx.program.procedures.groupBy(_.name).filter((_, p) => p.size > 1).toList.flatMap(_(1)) - assert(dupProcNames.isEmpty) - - ctx.program.procedures.foreach(p => - p.blocks.foreach(b => { - b.jump match { - case GoTo(targs, _) if targs.isEmpty => - Logger.warn(s"block ${b.label} in subroutine ${p.name} has no outgoing edges") - case _ => () - } - }) - ) - - if ( - !config.memoryTransform && (config.staticAnalysis.isEmpty || (config.staticAnalysis.get.memoryRegions == MemoryRegionsMode.Disabled)) - ) { - visit_prog(ir.transforms.StackSubstituter(), ctx.program) - } - - val specModifies = ctx.specification.subroutines.map(s => s.name -> s.modifies).toMap - ctx.program.setModifies(specModifies) - - visit_prog(ir.transforms.BoogieReservedRenamer(boogieReserved), ctx.program) - - assert(invariant.singleCallBlockEnd(ctx.program)) - - // check all blocks with an atomic section exist within the same procedure - val visited = mutable.Set[Block]() - for (p <- ctx.program.procedures) { - for (b <- p.blocks) { - if (!visited.contains(b)) { - if (b.atomicSection.isDefined) { - b.atomicSection.get.getBlocks.foreach { a => assert(a.parent == p) } - visited.addAll(b.atomicSection.get.getBlocks) - } - visited.addOne(b) - } - } - } - } - - def generateProcedureSummaries(ctx: IRContext, IRProgram: Program, simplified: Boolean = false): Boolean = { - var modified = false - // Need to know modifies clauses to generate summaries, but this is probably out of place - val specModifies = ctx.specification.subroutines.map(s => s.name -> s.modifies).toMap - ctx.program.setModifies(specModifies) - - val summaryGenerator = SummaryGenerator(IRProgram, simplified) - IRProgram.procedures - .filter { p => - p != IRProgram.mainProcedure - } - .foreach { procedure => - { - val req = summaryGenerator.generateRequires(procedure) - modified = modified | procedure.requires != req - 
procedure.requires = req - - val ens = summaryGenerator.generateEnsures(procedure) - modified = modified | procedure.ensures != ens - procedure.ensures = ens - } - } - - modified - } - - def generateLoopInvariants(IRProgram: Program) = { - FullLoopInvariantGenerator(IRProgram).addInvariants() - } - - def generateRelyGuaranteeConditions(threads: List[Procedure]): Unit = { - /* Todo: For the moment we are printing these to stdout, but in future we'd - like to add them to the IR. */ - type StateLatticeElement = LatticeMap[Variable, analysis.Interval] - type InterferenceLatticeElement = Map[Variable, StateLatticeElement] - val stateLattice = IntervalLatticeExtension() - val stateTransfer = SignedIntervalDomain().transfer - val intDom = ConditionalWritesDomain[StateLatticeElement](stateLattice, stateTransfer) - val relyGuarantees = - RelyGuaranteeGenerator[InterferenceLatticeElement, StateLatticeElement](intDom).generate(threads) - for ((p, (rely, guar)) <- relyGuarantees) { - StaticAnalysisLogger.info("--- " + p.procName + " " + "-" * 50 + "\n") - StaticAnalysisLogger.info("Rely:") - StaticAnalysisLogger.info(intDom.toString(rely) + "\n") - StaticAnalysisLogger.info("Guarantee:") - StaticAnalysisLogger.info(intDom.toString(guar) + "\n") - } - } -} - -/** Methods relating to program static analysis. - */ -object StaticAnalysis { - - def reducibleLoops(IRProgram: Program) = { - StaticAnalysisLogger.debug("reducible loops") - val foundLoops = LoopDetector.identify_loops(IRProgram) - foundLoops.irreducibleLoops.foreach(l => StaticAnalysisLogger.debug(s"Irreducible loop found: ${l.name}")) - - val newLoops = foundLoops.reducibleTransformIR().identifiedLoops - newLoops.foreach(l => StaticAnalysisLogger.debug(s"Loop found: ${l.name}")) - - foundLoops.updateIrWithLoops() - } - - /** Run all static analysis passes on the provided IRProgram. 
- */ - def analyse( - ictx: IRContext, - config: StaticAnalysisConfig, - iteration: Int, - previousResults: Option[StaticAnalysisContext] = None - ): StaticAnalysisContext = { - var ctx = ictx - val IRProgram: Program = ctx.program - val externalFunctions: Set[ExternalFunction] = ctx.externalFunctions - val globals: Set[SpecGlobal] = ctx.globals - val globalOffsets: Map[BigInt, BigInt] = ctx.globalOffsets - - assert(invariant.singleCallBlockEnd(ctx.program)) - assert(invariant.cfgCorrect(ctx.program)) - assert(invariant.blocksUniqueToEachProcedure(ctx.program)) - assert(invariant.correctCalls(ctx.program)) - - val subroutines = IRProgram.procedures - .filter(p => p.address.isDefined) - .map(p => p.address.get -> p.name) - .toMap - val globalAddresses = globals.map(s => s.address -> s.name).toMap - val globalSizes = globals.map(s => s.name -> s.size).toMap - val externalAddresses = externalFunctions.map(e => e.offset -> e.name).toMap - StaticAnalysisLogger.debug("Globals:") - StaticAnalysisLogger.debug(globalAddresses) - StaticAnalysisLogger.debug("Global Offsets: ") - StaticAnalysisLogger.debug(globalOffsets) - StaticAnalysisLogger.debug("Global Sizes: ") - StaticAnalysisLogger.debug(globalSizes) - StaticAnalysisLogger.debug("External: ") - StaticAnalysisLogger.debug(externalAddresses) - StaticAnalysisLogger.debug("Subroutine Addresses:") - StaticAnalysisLogger.debug(subroutines) - - // reducible loops - if (config.irreducibleLoops) { - reducibleLoops(IRProgram) - - config.analysisDotPath.foreach { s => - AnalysisResultDotLogger.writeToFile( - File(s"${s}_graph-after-loop-reduce-$iteration.dot"), - dotBlockGraph(IRProgram, IRProgram.map(b => b -> b.toString).toMap) - ) - AnalysisResultDotLogger.writeToFile( - File(s"${s}_blockgraph-after-loop-reduce-$iteration.dot"), - dotBlockGraph(IRProgram, IRProgram.filter(_.isInstanceOf[Block]).map(b => b -> b.toString).toMap) - ) - } - } - - val mergedSubroutines = subroutines ++ externalAddresses - - val domain = 
computeDomain(IntraProcIRCursor, IRProgram.procedures) - val interDomain = computeDomain(InterProcIRCursor, IRProgram.procedures) - - StaticAnalysisLogger.debug("[!] Running ANR") - val ANRSolver = ANRAnalysisSolver(IRProgram) - val ANRResult = ANRSolver.analyze() - - StaticAnalysisLogger.debug("[!] Running RNA") - val RNASolver = RNAAnalysisSolver(IRProgram) - val RNAResult = RNASolver.analyze() - - StaticAnalysisLogger.debug("[!] Running Inter-procedural Constant Propagation") - val interProcConstProp = InterProcConstantPropagation(IRProgram) - val interProcConstPropResult: Map[CFGPosition, Map[Variable, FlatElement[BitVecLiteral]]] = - interProcConstProp.analyze() - - config.analysisResultsPath.foreach { s => - DebugDumpIRLogger.writeToFile( - File(s"${s}OGconstprop$iteration.txt"), - printAnalysisResults(IRProgram, interProcConstPropResult) - ) - } - - val intraProcConstProp = IntraProcConstantPropagation(IRProgram) - val intraProcConstPropResult: Map[CFGPosition, Map[Variable, FlatElement[BitVecLiteral]]] = - intraProcConstProp.analyze() - - config.analysisResultsPath.foreach { s => - DebugDumpIRLogger.writeToFile( - File(s"${s}_new_ir_constprop$iteration.txt"), - printAnalysisResults(IRProgram, intraProcConstPropResult) - ) - } - - config.analysisDotPath.foreach { f => - val dumpdomain = computeDomain[CFGPosition, CFGPosition](InterProcIRCursor, IRProgram.procedures) - AnalysisResultDotLogger.writeToFile( - File(s"${f}_new_ir_intercfg$iteration.dot"), - toDot(dumpdomain.toSet, InterProcIRCursor, Map.empty, Set()) - ) - } - - val reachingDefinitionsAnalysisSolver = InterprocReachingDefinitionsAnalysisSolver(IRProgram) - val reachingDefinitionsAnalysisResults = reachingDefinitionsAnalysisSolver.analyze() - - config.analysisDotPath.foreach { s => - AnalysisResultDotLogger.writeToFile( - File(s"${s}_reachingDefinitions$iteration.dot"), - toDot( - IRProgram, - IRProgram.filter(_.isInstanceOf[Command]).map(b => b -> 
reachingDefinitionsAnalysisResults(b).toString).toMap, - true - ) - ) - } - - Logger.debug("[!] Running Writes To") - val writesTo = WriteToAnalysis(ctx.program).analyze() - - Logger.debug("[!] Running commondef variable renaming (Intra SSA)") - val SSAResults = getCommonDefinitionVariableRenaming(IRProgram, writesTo) - - config.analysisDotPath.foreach(s => { - writeToFile( - toDot(IRProgram, IRProgram.filter(_.isInstanceOf[Command]).map(b => b -> SSAResults(b).toString).toMap, true), - s"${s}_SSA$iteration.dot" - ) - }) - - val mmm = MemoryModelMap(globalOffsets, mergedSubroutines, globalAddresses, globalSizes) - mmm.preLoadGlobals() - - val previousVSAResults = if (previousResults.isDefined) { - previousResults.get.vsaResult - } else { - Map[CFGPosition, LiftedElement[Map[Variable | MemoryRegion, Set[Value]]]]() - } - - StaticAnalysisLogger.debug("[!] Running GRA") - val graSolver = GlobalRegionAnalysisSolver( - IRProgram, - domain.toSet, - interProcConstPropResult, - reachingDefinitionsAnalysisResults, - mmm, - previousVSAResults - ) - val graResult = graSolver.analyze() - - StaticAnalysisLogger.debug("[!] 
Running MRA") - val mraSolver = MemoryRegionAnalysisSolver( - IRProgram, - domain.toSet, - interProcConstPropResult, - reachingDefinitionsAnalysisResults, - graResult, - mmm, - previousVSAResults - ) - val mraResult = mraSolver.analyze() - - config.analysisDotPath.foreach { s => - AnalysisResultDotLogger.writeToFile(File(s"${s}_callgraph$iteration.dot"), dotCallGraph(IRProgram)) - AnalysisResultDotLogger.writeToFile( - File(s"${s}_blockgraph$iteration.dot"), - dotBlockGraph(IRProgram, IRProgram.filter(_.isInstanceOf[Block]).map(b => b -> b.toString).toMap) - ) - - AnalysisResultDotLogger.writeToFile( - File(s"${s}_new_ir_constprop$iteration.dot"), - toDot( - IRProgram, - IRProgram.filter(_.isInstanceOf[Command]).map(b => b -> intraProcConstPropResult(b).toString).toMap - ) - ) - - writeToFile( - toDot(IRProgram, IRProgram.filter(_.isInstanceOf[Command]).map(b => b -> ANRResult(b).toString).toMap), - s"${s}_ANR$iteration.dot" - ) - - writeToFile( - toDot(IRProgram, IRProgram.filter(_.isInstanceOf[Command]).map(b => b -> RNAResult(b).toString).toMap), - s"${s}_RNA$iteration.dot" - ) - - writeToFile( - toDot(IRProgram, IRProgram.filter(_.isInstanceOf[Command]).map(b => b -> mraResult(b).toString).toMap), - s"${s}_MRA$iteration.dot" - ) - - AnalysisResultDotLogger.writeToFile( - File(s"${s}_GRA$iteration.dot"), - toDot(IRProgram, IRProgram.filter(_.isInstanceOf[Command]).map(b => b -> graResult(b).toString).toMap) - ) - } - - StaticAnalysisLogger.debug("[!] Running MMM") - mmm.convertMemoryRegions( - mraSolver.procedureToStackRegions, - mraSolver.procedureToHeapRegions, - mraResult, - mraSolver.procedureToSharedRegions, - graSolver.getDataMap, - graResult - ) - mmm.logRegions() - - Logger.debug("[!] 
Running VSA") - val vsaSolver = ValueSetAnalysisSolver(IRProgram, mmm) - val vsaResult: Map[CFGPosition, LiftedElement[Map[Variable | MemoryRegion, Set[Value]]]] = vsaSolver.analyze() - - mmm.postLoadVSARelations(vsaResult, ANRResult, RNAResult) - - config.analysisDotPath.foreach { s => - AnalysisResultDotLogger.writeToFile( - File(s"${s}_VSA$iteration.dot"), - toDot(IRProgram, IRProgram.filter(_.isInstanceOf[Command]).map(b => b -> vsaResult(b).toString).toMap) - ) - } - - Logger.debug("[!] Running Steensgaard") - val steensgaardSolver = InterprocSteensgaardAnalysis(interDomain.toSet, mmm, SSAResults) - steensgaardSolver.analyze() - val steensgaardResults = steensgaardSolver.pointsTo() - - mmm.setCallSiteSummaries(steensgaardSolver.callSiteSummary) - - val paramResults: Map[Procedure, Set[Variable]] = ParamAnalysis(IRProgram).analyze() - val interLiveVarsResults: Map[CFGPosition, Map[Variable, TwoElement]] = InterLiveVarsAnalysis(IRProgram).analyze() - - StaticAnalysisContext( - intraProcConstProp = intraProcConstPropResult, - interProcConstProp = interProcConstPropResult, - memoryRegionResult = mraResult, - vsaResult = vsaResult, - interLiveVarsResults = interLiveVarsResults, - paramResults = paramResults, - steensgaardResults = steensgaardResults, - mmmResults = mmm, - symbolicAddresses = Map.empty, - reachingDefs = reachingDefinitionsAnalysisResults, - regionInjector = None, - localDSA = Map.empty, - bottomUpDSA = Map.empty, - topDownDSA = Map.empty, - writesToResult = writesTo, - ssaResults = SSAResults - ) - } - - def printAnalysisResults(prog: Program, result: Map[CFGPosition, _]): String = { - val results = mutable.ArrayBuffer[String]() - val toVisit = mutable.Stack[CFGPosition]() - val visited = mutable.HashSet[CFGPosition]() - toVisit.pushAll(prog.procedures) - - while (toVisit.nonEmpty) { - val next = toVisit.pop() - visited.add(next) - toVisit.pushAll( - IntraProcBlockIRCursor - .succ(next) - .diff(visited.collect[Block] { case b: Block => - b - }) - ) 
- - def contentStr(b: CFGPosition) = { - if result.contains(b) then "\n :: " + result(b) - else "" - } - - val t = next match - case p: Procedure => s"\nProcedure ${p.name}" - case b: Block => - Seq( - s" Block ${b.label}${contentStr(b)}", - b.statements - .map(s => { - " " + s.toString + contentStr(s) - }) - .mkString("\n"), - " " + b.jump.toString + contentStr(b.jump) - ).mkString("\n") - case s: Statement => s" Statement $s${contentStr(s)}" - case s: Jump => s" Jump $s${contentStr(s)}" - results.addOne(t) - } - results.mkString(System.lineSeparator()) - } -} - object RunUtils { def run(q: BASILConfig): BASILResult = { @@ -755,165 +50,6 @@ object RunUtils { } } - def doSimplify(ctx: IRContext, config: Option[StaticAnalysisConfig]): Unit = { - // writeToFile(dotBlockGraph(program, program.filter(_.isInstanceOf[Block]).map(b => b -> b.toString).toMap), s"blockgraph-before-simp.dot") - Logger.info("[!] Running Simplify") - val timer = PerformanceTimer("Simplify") - val program = ctx.program - - val foundLoops = LoopDetector.identify_loops(program) - val newLoops = foundLoops.reducibleTransformIR() - newLoops.updateIrWithLoops() - - for (p <- program.procedures) { - p.normaliseBlockNames() - } - - ctx.program.sortProceduresRPO() - - transforms.liftSVComp(ctx.program) - - config.foreach { - _.dumpILToPath.foreach { s => - DebugDumpIRLogger.writeToFile(File(s"${s}_il-before-simp.il"), pp_prog(program)) - } - } - - transforms.applyRPO(program) - - // example of printing a simple analysis - - transforms.removeEmptyBlocks(program) - transforms.coalesceBlocks(program) - transforms.removeEmptyBlocks(program) - - // transforms.coalesceBlocksCrossBranchDependency(program) - config.foreach { - _.analysisDotPath.foreach { s => - DebugDumpIRLogger.writeToFile(File(s"${s}_blockgraph-before-dsa.dot"), dotBlockGraph(program.mainProcedure)) - } - } - - Logger.info("[!] 
Simplify :: DynamicSingleAssignment") - config.foreach { - _.dumpILToPath.foreach { s => - DebugDumpIRLogger.writeToFile(File(s"${s}_il-before-dsa.il"), pp_prog(program)) - } - } - - transforms.OnePassDSA().applyTransform(program) - - transforms.inlinePLTLaunchpad(ctx.program) - - transforms.removeEmptyBlocks(program) - - config.foreach { - _.analysisDotPath.foreach { s => - AnalysisResultDotLogger.writeToFile( - File(s"${s}_blockgraph-after-dsa.dot"), - dotBlockGraph( - program, - (program.collect { case b: Block => - b -> pp_block(b) - }).toMap - ) - ) - } - } - config.foreach { - _.dumpILToPath.foreach { s => - DebugDumpIRLogger.writeToFile(File(s"${s}_il-after-dsa.il"), pp_prog(program)) - } - } - - if (ir.eval.SimplifyValidation.validate) { - Logger.info("DSA no uninitialised") - assert(invariant.allVariablesAssignedIndex(program)) - // Logger.info("Live vars difftest") - // val tipLiveVars : Map[CFGPosition, Set[Variable]] = analysis.IntraLiveVarsAnalysis(program).analyze() - // assert(program.procedures.forall(transforms.difftestLiveVars(_, tipLiveVars))) - - Logger.info("DSA Check") - val x = program.procedures.forall(transforms.rdDSAProperty) - assert(x) - Logger.info("DSA Check passed") - assert(invariant.singleCallBlockEnd(program)) - assert(invariant.cfgCorrect(program)) - assert(invariant.blocksUniqueToEachProcedure(program)) - } - - config.foreach { - _.dumpILToPath.foreach { s => - DebugDumpIRLogger.writeToFile(File(s"${s}_il-before-copyprop.il"), pp_prog(program)) - } - } - - // brute force run the analysis twice because it cleans up more stuff - // assert(program.procedures.forall(transforms.rdDSAProperty)) - config.foreach { - _.analysisDotPath.foreach { s => - AnalysisResultDotLogger.writeToFile( - File(s"${s}_blockgraph-before-copyprop.dot"), - dotBlockGraph(program.mainProcedure) - ) - } - } - Logger.info("Copyprop Start") - transforms.copyPropParamFixedPoint(program, ctx.globalOffsets) - - transforms.fixupGuards(program) - 
transforms.removeDuplicateGuard(program) - config.foreach { - _.analysisDotPath.foreach { s => - AnalysisResultDotLogger.writeToFile( - File(s"${s}_blockgraph-after-simp.dot"), - dotBlockGraph(program.mainProcedure) - ) - } - } - - transforms.liftLinuxAssertFail(ctx) - - // assert(program.procedures.forall(transforms.rdDSAProperty)) - - assert(invariant.blockUniqueLabels(program)) - Logger.info(s"CopyProp ${timer.checkPoint("Simplify")} ms ") - - config.foreach { - _.dumpILToPath.foreach { s => - DebugDumpIRLogger.writeToFile(File(s"${s}_il-after-copyprop.il"), pp_prog(program)) - } - } - - // val x = program.procedures.forall(transforms.rdDSAProperty) - // assert(x) - if (ir.eval.SimplifyValidation.validate) { - Logger.info("DSA Check (after transform)") - val x = program.procedures.forall(transforms.rdDSAProperty) - assert(x) - Logger.info("DSA Check succeeded") - } - // run this after cond recovery because sign bit calculations often need high bits - // which go away in high level conss - config.foreach { - _.dumpILToPath.foreach { s => - DebugDumpIRLogger.writeToFile(File(s"${s}_il-after-slices.il"), pp_prog(program)) - } - } - - // re-apply dsa - // transforms.OnePassDSA().applyTransform(program) - - if (ir.eval.SimplifyValidation.validate) { - Logger.info("[!] Simplify :: Writing simplification validation") - val w = BufferedWriter(FileWriter("rewrites.smt2")) - ir.eval.SimplifyValidation.makeValidation(w) - w.close() - } - - Logger.info("[!] Simplify :: finished") - } - def loadAndTranslate(conf: BASILConfig, postLoad: IRContext => Unit = s => ()): BASILResult = { Logger.info("[!] 
Loading Program") val q = conf @@ -925,19 +61,21 @@ object RunUtils { assert(invariant.cfgCorrect(ctx.program)) assert(invariant.blocksUniqueToEachProcedure(ctx.program)) - ctx = IRTransform.doCleanup(ctx, conf.simplify) + val analysisManager = AnalysisManager(ctx.program) + + if conf.simplify then doCleanupWithSimplify(ctx, analysisManager) + else doCleanupWithoutSimplify(ctx, analysisManager) + assert(ir.invariant.programDiamondForm(ctx.program)) - transforms.inlinePLTLaunchpad(ctx.program) + transforms.inlinePLTLaunchpad(ctx, analysisManager) assert(ir.invariant.programDiamondForm(ctx.program)) - if (q.loading.trimEarly) { - val before = ctx.program.procedures.size - transforms.stripUnreachableFunctions(ctx.program, q.loading.procedureTrimDepth) - Logger.info( - s"[!] Removed ${before - ctx.program.procedures.size} functions (${ctx.program.procedures.size} remaining)" - ) - } + + if q.loading.trimEarly then + getStripUnreachableFunctionsTransform(q.loading.procedureTrimDepth)(ctx, analysisManager) + // todo: since refactoring, there is some extra code that is run here + // see StripUnreachableFunctions.getStripUnreachableFunctionsTransform assert(ir.invariant.programDiamondForm(ctx.program)) ctx.program.procedures.foreach(transforms.RemoveUnreachableBlocks.apply) @@ -964,7 +102,7 @@ object RunUtils { q.loading.dumpIL.foreach(s => DebugDumpIRLogger.writeToFile(File(s"$s-before-analysis.il"), pp_prog(ctx.program))) val analysis = q.staticAnalysis.map { conf => - staticAnalysis(conf, ctx) + AnalysisPipelineMRA.runToFixpoint(conf, ctx) } q.loading.dumpIL.foreach(s => DebugDumpIRLogger.writeToFile(File(s"$s-after-analysis.il"), pp_prog(ctx.program))) @@ -1011,18 +149,16 @@ object RunUtils { memTransferTimer.checkPoint("Performed Memory Transform") } - if (conf.summariseProcedures) { - StaticAnalysisLogger.info("[!] 
Generating Procedure Summaries") - IRTransform.generateProcedureSummaries(ctx, ctx.program, q.loading.parameterForm || conf.simplify) - } + if q.summariseProcedures then + getGenerateProcedureSummariesTransform(q.loading.parameterForm || q.simplify)(ctx, analysisManager) if (!conf.staticAnalysis.exists(!_.irreducibleLoops) && conf.generateLoopInvariants) { if (!conf.staticAnalysis.exists(_.irreducibleLoops)) { - StaticAnalysis.reducibleLoops(ctx.program) + AnalysisPipelineMRA.reducibleLoops(ctx.program) } StaticAnalysisLogger.info("[!] Generating Loop Invariants") - IRTransform.generateLoopInvariants(ctx.program) + FullLoopInvariantGenerator(ctx.program).addInvariants() } if (q.runInterpret) { @@ -1055,12 +191,13 @@ object RunUtils { } } - IRTransform.prepareForTranslation(q, ctx) + getPrepareForTranslationTransform(q, Set("free"))(ctx, analysisManager) - if (conf.generateRelyGuarantees) { - StaticAnalysisLogger.info("[!] Generating Rely-Guarantee Conditions") - IRTransform.generateRelyGuaranteeConditions(ctx.program.procedures.toList.filter(p => p.returnBlock != None)) - } + if conf.generateRelyGuarantees then + getGenerateRgConditionsTransform(ctx.program.procedures.toList.filter(_.returnBlock != None))( + ctx, + analysisManager + ) q.loading.dumpIL.foreach(s => { val timer = PerformanceTimer("Dump IL") @@ -1098,106 +235,6 @@ object RunUtils { BASILResult(ctx, analysis, dsaContext, boogiePrograms) } - /** Use static analysis to resolve indirect calls and replace them in the IR until fixed point. - */ - def staticAnalysis(config: StaticAnalysisConfig, ctx: IRContext): StaticAnalysisContext = { - var iteration = 1 - var modified: Boolean = true - val analysisResult = mutable.ArrayBuffer[StaticAnalysisContext]() - while (modified) { - Logger.debug("[!] 
Running Static Analysis") - val result = StaticAnalysis.analyse(ctx, config, iteration, analysisResult.lastOption) - val previousResult = analysisResult.lastOption - analysisResult.append(result) - StaticAnalysisLogger.info("[!] Replacing Indirect Calls") - - /* - modified = transforms.SteensgaardIndirectCallResolution( - ctx.program, - result.steensgaardResults, - result.reachingDefs - ).resolveIndirectCalls() - */ - - if ( - config.memoryRegions == MemoryRegionsMode.MRA && (previousResult.isEmpty || result.vsaResult != previousResult.get.vsaResult) - ) { - modified = true - } else { - modified = - transforms.VSAIndirectCallResolution(ctx.program, result.vsaResult, result.mmmResults).resolveIndirectCalls() - } - - if (modified) { - iteration += 1 - StaticAnalysisLogger.info(s"[!] Analysing again (iter $iteration)") - } - } - - // should later move this to be inside while (modified) loop and have splitting threads cause further iterations - - if (config.threadSplit) { - transforms.splitThreads(ctx.program, analysisResult.last.steensgaardResults, analysisResult.last.ssaResults) - } - - val reachingDefs = ReachingDefsAnalysis(ctx.program, analysisResult.last.writesToResult).analyze() - config.analysisDotPath.foreach { s => - AnalysisResultDotLogger.writeToFile(File(s"${s}_ct.dot"), toDot(ctx.program)) - } - - StaticAnalysisLogger.info("[!] Running Symbolic Access Analysis") - val symResults: Map[CFGPosition, Map[SymbolicAddress, TwoElement]] = - SymbolicAddressAnalysis(ctx.program, analysisResult.last.interProcConstProp).analyze() - config.analysisDotPath.foreach { s => - val labels = symResults.map { (k, v) => k -> v.toString } - AnalysisResultDotLogger.writeToFile(File(s"${s}_saa.dot"), toDot(ctx.program, labels)) - } - - StaticAnalysisLogger.info("[!] 
Running DSA Analysis") - - writeToFile(pp_prog(ctx.program), "testo1.il") - val symbolTableEntries: Set[SymbolTableEntry] = ctx.globals ++ ctx.funcEntries - val dsa = DataStructureAnalysis( - ctx.program, - symResults, - analysisResult.last.interProcConstProp, - symbolTableEntries, - ctx.globalOffsets, - ctx.externalFunctions, - reachingDefs, - analysisResult.last.writesToResult, - analysisResult.last.paramResults - ) - dsa.analyze() - - config.analysisDotPath.foreach { s => - dsa.topDown(ctx.program.mainProcedure).toDot - DebugDumpIRLogger.writeToFile(File(s"${s}_main_dsg.dot"), dsa.topDown(ctx.program.mainProcedure).toDot) - } - - Logger.debug("[!] Injecting regions") - val regionInjector = if (config.memoryRegions == MemoryRegionsMode.MRA) { - val injector = RegionInjectorMRA(ctx.program, analysisResult.last.mmmResults) - injector.injectRegions() - Some(injector) - } else if (config.memoryRegions == MemoryRegionsMode.DSA) { - val injector = RegionInjectorDSA(ctx.program, dsa.topDown) - injector.injectRegions() - Some(injector) - } else { - None - } - - assert(invariant.singleCallBlockEnd(ctx.program)) - StaticAnalysisLogger.info(s"[!] 
Finished indirect call resolution after $iteration iterations") - analysisResult.last.copy( - symbolicAddresses = symResults, - localDSA = dsa.local.toMap, - bottomUpDSA = dsa.bottomUp.toMap, - topDownDSA = dsa.topDown.toMap, - regionInjector = regionInjector - ) - } } def readFromFile(fileName: String): Iterable[String] = { diff --git a/src/test/scala/ConditionLiftingTests.scala b/src/test/scala/ConditionLiftingTests.scala index 5ca554a42..c2c25d281 100644 --- a/src/test/scala/ConditionLiftingTests.scala +++ b/src/test/scala/ConditionLiftingTests.scala @@ -1,3 +1,4 @@ +import analysis.AnalysisManager import ir.* import ir.dsl.* import org.scalatest.funsuite.AnyFunSuite @@ -816,11 +817,12 @@ class ConditionLiftingRegressionTest extends AnyFunSuite with test_util.CaptureO test("conds inline test") { - var ctx = util.IRLoading.load(testProgram) - util.IRTransform.doCleanup(ctx, true) + var ctx = ir.IRLoading.load(testProgram) + + ir.transforms.doCleanupWithSimplify(ctx, AnalysisManager(ctx.program)) ir.transforms.clearParams(ctx.program) ctx = ir.transforms.liftProcedureCallAbstraction(ctx) - util.RunUtils.doSimplify(ctx, None) + ir.transforms.doSimplify(ctx, None) for (p <- ctx.program.procedures) { p.normaliseBlockNames() } diff --git a/src/test/scala/DataStructureAnalysisTest.scala b/src/test/scala/DataStructureAnalysisTest.scala index 4f4e03899..2562e968f 100644 --- a/src/test/scala/DataStructureAnalysisTest.scala +++ b/src/test/scala/DataStructureAnalysisTest.scala @@ -1,4 +1,5 @@ import analysis.data_structure_analysis.* +import analysis.{AnalysisPipelineMRA, StaticAnalysisContext} import boogie.SpecGlobal import ir.* import ir.dsl.* @@ -6,16 +7,7 @@ import org.scalatest.funsuite.AnyFunSuite import specification.Specification import test_util.{BASILTest, CaptureOutput} import translating.PrettyPrinter.* -import util.{ - BASILConfig, - BASILResult, - BoogieGeneratorConfig, - ILLoadingConfig, - IRContext, - RunUtils, - StaticAnalysisConfig, - 
StaticAnalysisContext -} +import util.{BASILConfig, BASILResult, BoogieGeneratorConfig, ILLoadingConfig, RunUtils, StaticAnalysisConfig} /** This is the test suite for testing DSA functionality The tests follow a general pattern of running BASIL analyses on * a test program and then asserting properties about the Data Structure Graph (DSG) of the function produced at @@ -35,7 +27,7 @@ class DataStructureAnalysisTest extends AnyFunSuite with CaptureOutput { val emptySpec = Specification(Set(), Set(), Map(), List(), List(), List(), Set()) val emptyContext = IRContext(List(), Set(), Set(), Set(), Map(), emptySpec, program) - RunUtils.staticAnalysis(StaticAnalysisConfig(), emptyContext) + AnalysisPipelineMRA.runToFixpoint(StaticAnalysisConfig(), emptyContext) } def runTest(relativePath: String): BASILResult = { @@ -762,7 +754,7 @@ class DataStructureAnalysisTest extends AnyFunSuite with CaptureOutput { val spec = Specification(Set(), globals, Map(), List(), List(), List(), Set()) val context = IRContext(List(), Set(), globals, Set(), globalOffsets, spec, program) - val staticAnalysisResult = RunUtils.staticAnalysis(StaticAnalysisConfig(), context) + val staticAnalysisResult = AnalysisPipelineMRA.runToFixpoint(StaticAnalysisConfig(), context) val dsg = staticAnalysisResult.topDownDSA(program.mainProcedure) diff --git a/src/test/scala/DifferentialAnalysisTest.scala b/src/test/scala/DifferentialAnalysisTest.scala index 91426bb9c..10822d8c3 100644 --- a/src/test/scala/DifferentialAnalysisTest.scala +++ b/src/test/scala/DifferentialAnalysisTest.scala @@ -1,9 +1,10 @@ -import ir.* +import analysis.{AnalysisManager, AnalysisPipelineMRA} import ir.eval.{ExecEffect, *} +import ir.{IRContext, IRLoading, *} import org.scalatest.* import org.scalatest.funsuite.* import test_util.* -import util.{ILLoadingConfig, IRContext, IRLoading, IRTransform, LogLevel, Logger, RunUtils, StaticAnalysisConfig} +import util.{ILLoadingConfig, LogLevel, Logger, StaticAnalysisConfig} import 
java.io.File @@ -89,23 +90,23 @@ abstract class DifferentialTest extends AnyFunSuite, CaptureOutput, TestCustomis ) var ictx = IRLoading.load(loading) - ictx = IRTransform.doCleanup(ictx) + ir.transforms.doCleanupWithoutSimplify(ictx, AnalysisManager(ictx.program)) var comparectx = IRLoading.load(loading) - comparectx = IRTransform.doCleanup(comparectx) + ir.transforms.doCleanupWithoutSimplify(comparectx, AnalysisManager(comparectx.program)) ir.transforms.clearParams(ictx.program) ir.transforms.clearParams(comparectx.program) for (analysis <- staticAnalysisConfig) { - RunUtils.staticAnalysis(analysis, comparectx) + AnalysisPipelineMRA.runToFixpoint(analysis, comparectx) } if (simplify) { ictx = ir.transforms.liftProcedureCallAbstraction(ictx) comparectx = ir.transforms.liftProcedureCallAbstraction(comparectx) - RunUtils.doSimplify(ictx, staticAnalysisConfig) + ir.transforms.doSimplify(ictx, staticAnalysisConfig) } diffTest(ictx, comparectx) diff --git a/src/test/scala/IndirectCallTests.scala b/src/test/scala/IndirectCallTests.scala index b9120cb79..2a02a81cc 100644 --- a/src/test/scala/IndirectCallTests.scala +++ b/src/test/scala/IndirectCallTests.scala @@ -1,8 +1,8 @@ -import analysis.data_structure_analysis.* +import analysis.data_structure_analysis.{DSAContext, *} import ir.* import org.scalatest.funsuite.* import test_util.{BASILTest, CaptureOutput, TestConfig, TestCustomisation} -import util.{BASILResult, DSAContext, DSConfig, LogLevel, Logger, StaticAnalysisConfig} +import util.{BASILResult, DSConfig, LogLevel, Logger, StaticAnalysisConfig} import scala.collection.mutable import scala.collection.mutable.ArrayBuffer diff --git a/src/test/scala/InterpretTestConstProp.scala b/src/test/scala/InterpretTestConstProp.scala index 7ea77790e..683c2eb53 100644 --- a/src/test/scala/InterpretTestConstProp.scala +++ b/src/test/scala/InterpretTestConstProp.scala @@ -1,9 +1,9 @@ -import analysis.* -import ir.* +import analysis.{AnalysisPipelineMRA, *} +import ir.{IRLoading, 
*} import org.scalatest.* import org.scalatest.funsuite.* import test_util.{BASILTest, CaptureOutput, TestValueDomainWithInterpreter} -import util.{ILLoadingConfig, IRLoading, IRTransform, LogLevel, Logger, RunUtils, StaticAnalysisConfig} +import util.{ILLoadingConfig, LogLevel, Logger, StaticAnalysisConfig} @test_util.tags.StandardSystemTest class InterpretTestConstProp @@ -26,10 +26,10 @@ class InterpretTestConstProp val path = s"$testPath/$testName/$compiler/$testName" val loading = ILLoadingConfig(inputFile = s"$path.adt", relfFile = Some(s"$path.relf"), dumpIL = None) - var ictx = IRLoading.load(loading) - ictx = IRTransform.doCleanup(ictx) + val ictx = IRLoading.load(loading) + ir.transforms.doCleanupWithoutSimplify(ictx, AnalysisManager(ictx.program)) ir.transforms.clearParams(ictx.program) - val analyses = RunUtils.staticAnalysis(StaticAnalysisConfig(None, None, None), ictx) + val analyses = AnalysisPipelineMRA.runToFixpoint(StaticAnalysisConfig(None, None, None), ictx) val analysisres = analyses.intraProcConstProp.collect { case (block: Block, v) => block -> v diff --git a/src/test/scala/IntervalDSATest.scala b/src/test/scala/IntervalDSATest.scala index a0aa77c75..185ca002e 100644 --- a/src/test/scala/IntervalDSATest.scala +++ b/src/test/scala/IntervalDSATest.scala @@ -1,5 +1,5 @@ -import analysis.data_structure_analysis import analysis.data_structure_analysis.{DSInterval, Heap, IntervalDSA, Par, Ret, SymBase} +import analysis.{AnalysisPipelineMRA, StaticAnalysisContext, data_structure_analysis} import boogie.SpecGlobal import ir.Endian.LittleEndian import ir.dsl.{block, directCall, goto, proc, prog, ret} @@ -224,7 +224,7 @@ def runAnalysis(program: Program): StaticAnalysisContext = { val emptySpec = Specification(Set(), Set(), Map(), List(), List(), List(), Set()) val emptyContext = IRContext(List(), Set(), Set(), Set(), Map(), emptySpec, program) - RunUtils.staticAnalysis(StaticAnalysisConfig(), emptyContext) + 
AnalysisPipelineMRA.runToFixpoint(StaticAnalysisConfig(), emptyContext) } def programToContext( @@ -247,7 +247,6 @@ def globalsToLiteral(ctx: IRContext) = { @test_util.tags.AnalysisSystemTest3 class IntervalDSATest extends AnyFunSuite with test_util.CaptureOutput { - def runTest(relativePath: String, main: Option[String] = None, config: DSConfig = DSConfig()): BASILResult = { val path = s"${BASILTest.rootDirectory}/$relativePath" RunUtils.loadAndTranslate( diff --git a/src/test/scala/IrreducibleLoop.scala b/src/test/scala/IrreducibleLoop.scala index f200245a4..f0ef333c7 100644 --- a/src/test/scala/IrreducibleLoop.scala +++ b/src/test/scala/IrreducibleLoop.scala @@ -1,9 +1,9 @@ import analysis.LoopDetector -import ir.{Block, Program, dotBlockGraph} +import ir.{Block, IRLoading, Program, dotBlockGraph} import org.scalatest.funsuite.AnyFunSuite import test_util.{BASILTest, CaptureOutput} import translating.{BAPToIR, ReadELFData} -import util.{ILLoadingConfig, IRLoading, LogLevel, Logger} +import util.{ILLoadingConfig, LogLevel, Logger} import scala.sys.process.* diff --git a/src/test/scala/LoopInvariantTests.scala b/src/test/scala/LoopInvariantTests.scala index e478b1bd4..b37b5d006 100644 --- a/src/test/scala/LoopInvariantTests.scala +++ b/src/test/scala/LoopInvariantTests.scala @@ -5,7 +5,7 @@ import ir.dsl.* import org.scalatest.funsuite.AnyFunSuite import test_util.{CaptureOutput, programToContext} import util.SMT.{SMTSolver, SatResult} -import util.{BASILConfig, BASILResult, BoogieGeneratorConfig, ILLoadingConfig, IRContext, RunUtils} +import util.{BASILConfig, BASILResult, BoogieGeneratorConfig, ILLoadingConfig, RunUtils} @test_util.tags.UnitTest class LoopInvariantTests extends AnyFunSuite, CaptureOutput { diff --git a/src/test/scala/MemoryTransformTests.scala b/src/test/scala/MemoryTransformTests.scala index 509679127..a104faf8b 100644 --- a/src/test/scala/MemoryTransformTests.scala +++ b/src/test/scala/MemoryTransformTests.scala @@ -1,3 +1,4 @@ +import 
analysis.{AnalysisPipelineMRA, StaticAnalysisContext} import boogie.SpecGlobal import ir.* import ir.Endian.LittleEndian @@ -16,7 +17,7 @@ class MemoryTransformTests extends AnyFunSuite with CaptureOutput { val emptySpec = Specification(Set(), Set(), Map(), List(), List(), List(), Set()) val emptyContext = IRContext(List(), Set(), Set(), Set(), Map(), emptySpec, program) - RunUtils.staticAnalysis(StaticAnalysisConfig(), emptyContext) + AnalysisPipelineMRA.runToFixpoint(StaticAnalysisConfig(), emptyContext) } def runTest(relativePath: String): BASILResult = { diff --git a/src/test/scala/PCTrackingTest.scala b/src/test/scala/PCTrackingTest.scala index c31af649a..9f65bd82a 100644 --- a/src/test/scala/PCTrackingTest.scala +++ b/src/test/scala/PCTrackingTest.scala @@ -1,7 +1,7 @@ -import ir.* +import ir.{IRContext, *} import org.scalatest.funsuite.AnyFunSuite import test_util.{BASILTest, CaptureOutput} -import util.{BASILConfig, BoogieGeneratorConfig, ILLoadingConfig, IRContext, PCTrackingOption, StaticAnalysisConfig} +import util.{BASILConfig, BoogieGeneratorConfig, ILLoadingConfig, PCTrackingOption, StaticAnalysisConfig} @test_util.tags.UnitTest class PCTrackingTest extends AnyFunSuite with CaptureOutput { diff --git a/src/test/scala/PointsToTest.scala b/src/test/scala/PointsToTest.scala index cfcf725a8..1bb119271 100644 --- a/src/test/scala/PointsToTest.scala +++ b/src/test/scala/PointsToTest.scala @@ -1,12 +1,13 @@ +import analysis.StaticAnalysisContext import boogie.* -import ir.* import ir.Endian.LittleEndian import ir.dsl.* +import ir.{IRContext, *} import org.scalatest.* import org.scalatest.funsuite.* import specification.* import test_util.CaptureOutput -import util.{IRContext, StaticAnalysis, StaticAnalysisConfig, StaticAnalysisContext} +import util.StaticAnalysisConfig @test_util.tags.DisabledTest class PointsToTest extends AnyFunSuite with CaptureOutput with OneInstancePerTest { @@ -28,7 +29,7 @@ class PointsToTest extends AnyFunSuite with CaptureOutput 
with OneInstancePerTes Specification(Set(), Set(), Map(), List(), List(), List(), Set()), program ) - StaticAnalysis.analyse(ctx, StaticAnalysisConfig(), 1) + analysis.AnalysisPipelineMRA.analyse(ctx, StaticAnalysisConfig(), 1) } /** Test that the analysis correctly identifies the stack pointer even when it is aliased diff --git a/src/test/scala/RemovePCTest.scala b/src/test/scala/RemovePCTest.scala index 944f33e0f..82e1793a1 100644 --- a/src/test/scala/RemovePCTest.scala +++ b/src/test/scala/RemovePCTest.scala @@ -1,7 +1,7 @@ import ir.* import org.scalatest.funsuite.AnyFunSuite import test_util.{BASILTest, CaptureOutput} -import util.{BASILConfig, BoogieGeneratorConfig, ILLoadingConfig, IRContext, PCTrackingOption, StaticAnalysisConfig} +import util.{BASILConfig, BoogieGeneratorConfig, ILLoadingConfig, PCTrackingOption, StaticAnalysisConfig} @test_util.tags.UnitTest class RemovePCTest extends AnyFunSuite with CaptureOutput { diff --git a/src/test/scala/SVATest.scala b/src/test/scala/SVATest.scala index d4f6512fe..8a2a77566 100644 --- a/src/test/scala/SVATest.scala +++ b/src/test/scala/SVATest.scala @@ -1,5 +1,6 @@ import analysis.data_structure_analysis.* import analysis.data_structure_analysis.given +import analysis.{AnalysisPipelineMRA, StaticAnalysisContext} import boogie.SpecGlobal import ir.* import ir.dsl.* @@ -18,7 +19,7 @@ class SVATest extends AnyFunSuite with CaptureOutput { val emptySpec = Specification(Set(), Set(), Map(), List(), List(), List(), Set()) val emptyContext = IRContext(List(), Set(), Set(), Set(), Map(), emptySpec, program) - RunUtils.staticAnalysis(StaticAnalysisConfig(), emptyContext) + AnalysisPipelineMRA.runToFixpoint(StaticAnalysisConfig(), emptyContext) } def runTest(context: IRContext): BASILResult = { diff --git a/src/test/scala/TestKnownBitsInterpreter.scala b/src/test/scala/TestKnownBitsInterpreter.scala index c654a2337..297018e50 100644 --- a/src/test/scala/TestKnownBitsInterpreter.scala +++ 
b/src/test/scala/TestKnownBitsInterpreter.scala @@ -132,7 +132,7 @@ class TestKnownBitsInterpreter ) ) - val kbitsCtx = util.IRLoading.load(kbitsProg) + val kbitsCtx = ir.IRLoading.load(kbitsProg) def params(v1: BigInt, v2: BigInt) = Some( Seq( diff --git a/src/test/scala/ir/InterpreterTests.scala b/src/test/scala/ir/InterpreterTests.scala index a4cd3c56d..681724bca 100644 --- a/src/test/scala/ir/InterpreterTests.scala +++ b/src/test/scala/ir/InterpreterTests.scala @@ -1,5 +1,6 @@ package ir +import analysis.AnalysisManager import boogie.{Scope, SpecGlobal} import ir.dsl.* import ir.dsl.given @@ -9,7 +10,7 @@ import org.scalatest.funsuite.AnyFunSuite import test_util.{BASILTest, CaptureOutput} import translating.PrettyPrinter.* import util.functional.* -import util.{ILLoadingConfig, IRContext, IRLoading, IRTransform, LogLevel, Logger, PerformanceTimer} +import util.{ILLoadingConfig, LogLevel, Logger, PerformanceTimer} import scala.language.implicitConversions @@ -44,8 +45,9 @@ class InterpreterTests extends AnyFunSuite with CaptureOutput with BeforeAndAfte dumpIL = None ) - val p = IRLoading.load(loading) - val ctx = IRTransform.doCleanup(p) + val ctx = IRLoading.load(loading) + ir.transforms.doCleanupWithoutSimplify(ctx, AnalysisManager(ctx.program)) + ir.transforms.clearParams(ctx.program) // val bapProgram = loadBAP(loading.inputFile) // val (symbols, externalFunctions, globals, _, mainAddress) = loadReadELF(loading.relfFile, loading) diff --git a/src/test/scala/test_util/BASILTest.scala b/src/test/scala/test_util/BASILTest.scala index cd6c60cd0..e6dc3f2f4 100644 --- a/src/test/scala/test_util/BASILTest.scala +++ b/src/test/scala/test_util/BASILTest.scala @@ -1,5 +1,6 @@ package test_util +import ir.IRContext import org.scalatest.concurrent.ScaledTimeSpans import org.scalatest.time.{Seconds, Span} import util.boogie_interaction.* @@ -9,7 +10,6 @@ import util.{ BoogieGeneratorConfig, DSConfig, ILLoadingConfig, - IRContext, Logger, RunUtils, StaticAnalysisConfig 
diff --git a/src/test/scala/test_util/Context.scala b/src/test/scala/test_util/Context.scala index 7d8ce53c1..cb8c089e5 100644 --- a/src/test/scala/test_util/Context.scala +++ b/src/test/scala/test_util/Context.scala @@ -1,9 +1,8 @@ package test_util import boogie.SpecGlobal -import ir.{Program, cilvisitor, transforms} +import ir.{IRContext, Program, cilvisitor, transforms} import specification.Specification -import util.IRContext def programToContext( program: Program, diff --git a/src/test/scala/test_util/TestValueDomainWithInterpreter.scala b/src/test/scala/test_util/TestValueDomainWithInterpreter.scala index 16e6e390b..074494b40 100644 --- a/src/test/scala/test_util/TestValueDomainWithInterpreter.scala +++ b/src/test/scala/test_util/TestValueDomainWithInterpreter.scala @@ -1,9 +1,8 @@ package test_util -import ir.* import ir.eval.* +import ir.{IRContext, *} import translating.PrettyPrinter.* -import util.IRContext import util.functional.State trait TestValueDomainWithInterpreter[T] {