From 3811c030da94944a786d8946e04ff0e9eec08f95 Mon Sep 17 00:00:00 2001
From: Oren Shomron
Date: Thu, 11 Sep 2025 13:35:03 -0700
Subject: [PATCH] perf: memoize IR plan calls to reduce reevaluation cost

Adds a per-evaluation call memoization stack to the IREvaluationContext.
This currently supports rules, not arbitrary functions; the two are
distinguished by call arity.

For a policy with many repeated calls, we see evaluation time drop from
~241ms to ~2ms. No regression in compliance tests.

Signed-off-by: Oren Shomron
---
 Sources/Rego/IREvaluator.swift | 45 ++++++++++++++++++++-----
 Sources/Rego/Memo.swift        | 60 ++++++++++++++++++++++++++++++++++
 2 files changed, 96 insertions(+), 9 deletions(-)
 create mode 100644 Sources/Rego/Memo.swift

diff --git a/Sources/Rego/IREvaluator.swift b/Sources/Rego/IREvaluator.swift
index 4913671..ff8a0e3 100644
--- a/Sources/Rego/IREvaluator.swift
+++ b/Sources/Rego/IREvaluator.swift
@@ -124,6 +124,12 @@ internal struct IREvaluationContext {
     var policy: IndexedIRPolicy
     var maxCallDepth: Int = 16_384
     var callDepth: Int = 0
+    var callMemo = MemoStack()  // shared stack of MemoCache dictionaries
+
+    init(ctx: EvaluationContext, policy: IndexedIRPolicy) {
+        self.ctx = ctx
+        self.policy = policy
+    }
 
     func withIncrementedCallDepth() -> IREvaluationContext {
         var ctx = self
@@ -911,13 +917,16 @@ func evalBlock(
             var newLocals = currentScopePtr.v.locals
             newLocals[stmt.local] = patched
             currentScopePtr = framePtr.v.pushScope(locals: newLocals)
-            // Start executing the block on the new scope we just pushed
-            let blockResult = try await evalBlock(
-                withContext: ctx,
-                framePtr: framePtr,
-                caller: statement,
-                block: stmt.block
-            )
+
+            let blockResult = try await ctx.callMemo.withPush {
+                // Start executing the block on the new scope we just pushed
+                return try await evalBlock(
+                    withContext: ctx,
+                    framePtr: framePtr,
+                    caller: statement,
+                    block: stmt.block
+                )
+            }
 
             // Squash locals from the child frame back into the current frame.
             // Overlay the original (non-patched) value, keep the other side-effects.
@@ -957,6 +966,13 @@ private func evalCall(
     args: [IR.Operand],
     isDynamic: Bool = false
 ) async throws -> AST.RegoValue {
+    // Check the memo cache, if applicable, to save repeated evaluation time for rules
+    let shouldMemoize = args.count == 2  // Currently supports _rules_, not _functions_
+    let sig = InvocationKey(funcName: funcName, args: args)
+    if shouldMemoize, let cachedResult = ctx.callMemo[sig] {
+        return cachedResult
+    }
+
     var argValues: [AST.RegoValue] = []
     for arg in args {
         // Note: we do not enforce that args are defined here, it appears
@@ -974,24 +990,35 @@ private func evalCall(
             return .undefined
         }
 
-        return try await callPlanFunc(
+        let result = try await callPlanFunc(
             ctx: ctx,
             frame: frame,
             caller: caller,
             funcName: funcName,
             args: argValues
         )
+
+        if shouldMemoize {
+            ctx.callMemo[sig] = result
+        }
+        return result
     }
 
     // Handle plan-defined functions first
     if ctx.policy.funcs[funcName] != nil {
-        return try await callPlanFunc(
+        let result = try await callPlanFunc(
            ctx: ctx,
            frame: frame,
            caller: caller,
            funcName: funcName,
            args: argValues
         )
+
+        if shouldMemoize {
+            ctx.callMemo[sig] = result
+        }
+
+        return result
     }
 
     // Handle built-in functions last
diff --git a/Sources/Rego/Memo.swift b/Sources/Rego/Memo.swift
new file mode 100644
index 0000000..f59b16a
--- /dev/null
+++ b/Sources/Rego/Memo.swift
@@ -0,0 +1,60 @@
+import AST
+import Foundation
+import IR
+
+/// InvocationKey is a key for memoizing an IR function call invocation.
+/// Note that we capture the arguments as unresolved operands and not resolved values,
+/// as hashing the values was proving extremely expensive. We instead rely on the
+/// invariant that the plan / evaluator will not modify a local after it has been initially set.
+struct InvocationKey: Hashable {
+    let funcName: String
+    let args: [IR.Operand]
+}
+
+/// MemoCache is a memoization cache of plan invocations.
+typealias MemoCache = [InvocationKey: AST.RegoValue]
+
+/// MemoStack is a stack of MemoCaches.
+final class MemoStack {
+    var stack: [MemoCache] = []
+}
+
+extension MemoStack {
+    /// Get and set values on the cache at the top of the memo stack.
+    subscript(key: InvocationKey) -> AST.RegoValue? {
+        get {
+            guard !self.stack.isEmpty else {
+                return nil
+            }
+            return self.stack[self.stack.count - 1][key]
+        }
+        set {
+            if self.stack.isEmpty {
+                self.stack.append(MemoCache.init())
+            }
+            self.stack[self.stack.count - 1][key] = newValue
+        }
+    }
+
+    func push() {
+        self.stack.append(MemoCache.init())
+    }
+
+    func pop() {
+        guard !self.stack.isEmpty else {
+            return
+        }
+        self.stack.removeLast()
+    }
+
+    /// withPush returns the result of calling the provided closure with
+    /// a fresh MemoCache pushed on the stack. The MemoCache will only be
+    /// active during that call, and discarded when it completes.
+    func withPush<T>(_ body: () async throws -> T) async rethrows -> T {
+        self.push()
+        defer {
+            self.pop()
+        }
+        return try await body()
+    }
+}
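
Illustrative note (not part of the patch itself): what MemoStack provides is scoped memoization, where entries written inside a withPush call are discarded when that call returns. The sketch below shows this behavior in isolation; ScopedMemo is a hypothetical, simplified stand-in for MemoStack, with String keys and Int values standing in for InvocationKey and AST.RegoValue, and synchronous closures instead of async ones.

// Hypothetical, simplified stand-in for MemoStack.
final class ScopedMemo {
    private var stack: [[String: Int]] = []

    // Read/write the cache at the top of the stack, mirroring MemoStack's subscript.
    subscript(key: String) -> Int? {
        get { stack.last?[key] }
        set {
            if stack.isEmpty { stack.append([:]) }
            stack[stack.count - 1][key] = newValue
        }
    }

    // Run `body` with a fresh cache pushed; the cache is discarded when `body` returns.
    func withPush<T>(_ body: () throws -> T) rethrows -> T {
        stack.append([:])
        defer { stack.removeLast() }
        return try body()
    }
}

let memo = ScopedMemo()
memo["rule/a"] = 1                    // cached in the outer scope

let seenInside: Int? = memo.withPush {
    memo["rule/b"] = 2                // cached only for this pushed scope
    return memo["rule/a"]             // outer entries are not visible here
}

assert(seenInside == nil)             // the pushed scope started empty
assert(memo["rule/b"] == nil)         // inner entries were discarded on pop
assert(memo["rule/a"] == 1)           // the outer cache is intact

In the patch, evalBlock wraps the block evaluated with a patched local (a `with` statement) in withPush, so results memoized under the patched value are dropped when that block completes and cannot leak into the surrounding evaluation.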