From 2246a31e726762ea741a299f598c7878fa66dd83 Mon Sep 17 00:00:00 2001
From: Vincent Ambo
Date: Fri, 2 Sep 2022 21:49:11 +0300
Subject: refactor(tvix/eval): return call frame result from VM::call

Previously, "calling" (setting up the VM run loop for executing a call
frame) and "running" (running this loop to completion) were separate
operations. This was basically an attempt to avoid nesting `VM::run`
invocations.

However, doing things this way introduced some tricky bugs around
exiting the call frames of thunks vs. builtins & closures.

For now, we unify the two operations and always return the value to
the caller directly. This makes calls a little less efficient, but it
gives us a chance to nail down some other strange behaviours and then
re-optimise this afterwards.

To make sure we tackle this again further down the line, I've added it
to the list of known possible optimisations.

Change-Id: I96828ab6a628136e0bac1bf03555faa4e6b74ece
Reviewed-on: https://cl.tvl.fyi/c/depot/+/6415
Reviewed-by: sterni
Tested-by: BuildkiteCI
---
 tvix/eval/src/value/thunk.rs |  4 ++--
 tvix/eval/src/vm.rs          | 32 ++++++++++++++++++++------------
 2 files changed, 22 insertions(+), 14 deletions(-)

(limited to 'tvix/eval/src')

diff --git a/tvix/eval/src/value/thunk.rs b/tvix/eval/src/value/thunk.rs
index 4fd41689c7..59fe55cec9 100644
--- a/tvix/eval/src/value/thunk.rs
+++ b/tvix/eval/src/value/thunk.rs
@@ -84,9 +84,9 @@ impl Thunk {
             if let ThunkRepr::Suspended { lambda, upvalues } =
                 std::mem::replace(&mut *thunk_mut, ThunkRepr::Blackhole)
             {
-                vm.call(lambda, upvalues, 0);
                 *thunk_mut = ThunkRepr::Evaluated(
-                    vm.run().map_err(|e| ErrorKind::ThunkForce(Box::new(e)))?,
+                    vm.call(lambda, upvalues, 0)
+                        .map_err(|e| ErrorKind::ThunkForce(Box::new(e)))?,
                 );
             }
         }
diff --git a/tvix/eval/src/vm.rs b/tvix/eval/src/vm.rs
index 8d616b8d73..fecaae37aa 100644
--- a/tvix/eval/src/vm.rs
+++ b/tvix/eval/src/vm.rs
@@ -161,7 +161,14 @@ impl VM {
         }
     }
 
-    pub fn call(&mut self, lambda: Rc<Lambda>, upvalues: Vec<Value>, arg_count: usize) {
+    /// Execute the given lambda in this VM's context, returning its
+    /// value after its stack frame completes.
+    pub fn call(
+        &mut self,
+        lambda: Rc<Lambda>,
+        upvalues: Vec<Value>,
+        arg_count: usize,
+    ) -> EvalResult<Value> {
         let frame = CallFrame {
             lambda,
             upvalues,
@@ -170,22 +177,22 @@ impl VM {
         };
 
         self.frames.push(frame);
+        self.run()
     }
 
-    pub fn run(&mut self) -> EvalResult<Value> {
+    /// Run the VM's current stack frame to completion and return the
+    /// value.
+    fn run(&mut self) -> EvalResult<Value> {
         #[cfg(feature = "disassembler")]
         let mut tracer = Tracer::new();
 
         loop {
+            // Break the loop if this call frame has already run to
+            // completion, pop it off, and return the value to the
+            // caller.
             if self.frame().ip == self.chunk().code.len() {
-                // If this is the end of the top-level function,
-                // return, otherwise pop the call frame.
-                if self.frames.len() == 1 {
-                    return Ok(self.pop());
-                }
-
-                self.frames.pop();
-                continue;
+                return Ok(self.pop());
             }
 
             let op = self.inc_ip();
@@ -413,7 +420,9 @@ impl VM {
                 let callable = self.pop();
                 match callable {
                     Value::Closure(closure) => {
-                        self.call(closure.lambda(), closure.upvalues().to_vec(), 1)
+                        let result =
+                            self.call(closure.lambda(), closure.upvalues().to_vec(), 1)?;
+                        self.push(result)
                     }
 
                     Value::Builtin(builtin) => {
@@ -684,8 +693,7 @@ pub fn run_lambda(lambda: Lambda) -> EvalResult<Value> {
         with_stack: vec![],
     };
 
-    vm.call(Rc::new(lambda), vec![], 0);
-    let value = vm.run()?;
+    let value = vm.call(Rc::new(lambda), vec![], 0)?;
     vm.force_for_output(&value)?;
     Ok(value)
 }
--
cgit 1.4.1