Incremental Merging
dpl0a committed Apr 3, 2023
1 parent bfab9a2 commit 17031c9
Showing 6 changed files with 219 additions and 59 deletions.
77 changes: 59 additions & 18 deletions src/eval/cache/incremental.rs
@@ -70,7 +70,7 @@ impl IncCache
         idx
     }

-    fn revnode_as_explicit_fun<'a, I>(node: &IncNode, args: I) -> IncNode
+    fn revnode_as_explicit_fun<'a, I>(node: &mut IncNode, args: I)
     where
         I: DoubleEndedIterator<Item = &'a Ident>,
     {
@@ -85,16 +85,12 @@ impl IncCache
                 let as_function =
                     args.rfold(body, |built, id| RichTerm::from(Term::Fun(*id, built)));

-                IncNode::new(
-                    Closure {
-                        body: as_function,
-                        env,
-                    },
-                    node.kind,
-                    node.bty.clone(),
-                )
+                node.orig = Closure {
+                    body: as_function,
+                    env,
+                }
             }
-            _ => node.clone(),
+            _ => (),
         }
     }

@@ -131,6 +127,27 @@ impl IncCache
         }
     }

+    fn propagate_dirty_vec(&mut self, indices: Vec<CacheIndex>) {
+        let mut visited = HashSet::new();
+        let mut stack = indices;
+
+        while !stack.is_empty() {
+            let i = stack.pop().unwrap();
+            visited.insert(i);
+            let current_node = self.store.get_mut(i).unwrap();
+            current_node.cached = None;
+            current_node.state = IncNodeState::Suspended;
+            stack.extend(
+                current_node
+                    .backlinks
+                    .iter()
+                    .map(|x| x.idx)
+                    .filter(|x| !visited.contains(x)),
+            )
+        }
+    }
+
     /* Do we need this when we can revert in place?
     fn propagate_revert(&mut self, id: Ident, idx: CacheIndex) -> HashMap<Ident, CacheIndex> {
@@ -174,11 +191,30 @@ impl IncCache
            let current_node = self.store.get_mut(*i).unwrap();

            for dep in current_node.backlinks.iter_mut() {
-                dep.idx = *new_indices.get(i).unwrap();
+                dep.idx = if let Some(idx) = new_indices.get(&dep.idx) {
+                    *idx
+                } else {
+                    dep.idx
+                }
            }

+            let mut to_be_updated = vec![];
+
            for dep in current_node.fwdlinks.iter_mut() {
-                dep.idx = *new_indices.get(i).unwrap();
+                dep.idx = if let Some(idx) = new_indices.get(&dep.idx) {
+                    *idx
+                } else {
+                    to_be_updated.push(dep.clone());
+                    dep.idx
+                }
            }
+
+            for dep in to_be_updated {
+                let target_node = self.store.get_mut(dep.idx).unwrap();
+                target_node.backlinks.push(DependencyLink {
+                    id: dep.id,
+                    idx: *i,
+                });
+            }
        }

@@ -345,7 +381,7 @@ impl Cache for IncCache
         env: &mut Environment,
         fields: I,
     ) -> RichTerm {
-        let node = self.store.get(idx).unwrap();
+        let node = self.store.get_mut(idx).unwrap();

         let mut deps_filter: Box<dyn FnMut(&&Ident) -> bool> = match node.bty.clone() {
             BindingType::Revertible(FieldDeps::Known(deps)) => {
@@ -355,13 +391,10 @@ impl Cache for IncCache
             BindingType::Normal => Box::new(|_: &&Ident| false),
         };

-        let node_as_function = self.add_node(IncCache::revnode_as_explicit_fun(
-            node,
-            fields.clone().filter(&mut deps_filter),
-        ));
+        IncCache::revnode_as_explicit_fun(node, fields.clone().filter(&mut deps_filter));

         let fresh_var = Ident::fresh();
-        env.insert(fresh_var, node_as_function);
+        env.insert(fresh_var, idx);

         let as_function_closurized = RichTerm::from(Term::Var(fresh_var));
         let args = fields.filter_map(|id| deps_filter(&id).then(|| RichTerm::from(Term::Var(*id))));
@@ -370,4 +403,12 @@
                 RichTerm::from(Term::App(partial_app, arg))
             })
     }
+
+    fn smart_clone(&mut self, v: Vec<CacheIndex>) -> HashMap<CacheIndex, CacheIndex> {
+        self.smart_clone(v)
+    }
+
+    fn propagate_dirty(&mut self, indices: Vec<CacheIndex>) {
+        self.propagate_dirty_vec(indices);
+    }
 }
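
The new `propagate_dirty_vec` walks the dependency graph in reverse, popping a worklist and clearing each node's memoized value so that invalidated results are recomputed on the next access. Below is a minimal, self-contained model of that traversal; `Node` and plain `usize` indices are stand-ins, not Nickel's actual `IncNode`/`CacheIndex` types:

    use std::collections::HashSet;

    struct Node {
        cached: Option<String>,  // stand-in for the memoized closure
        backlinks: Vec<usize>,   // indices of nodes that depend on this one
    }

    fn propagate_dirty(store: &mut [Node], roots: Vec<usize>) {
        let mut visited = HashSet::new();
        let mut stack = roots;
        while let Some(i) = stack.pop() {
            visited.insert(i);
            store[i].cached = None; // drop the memoized result
            // Follow *reverse* dependencies: whatever read this node is stale too.
            let next: Vec<usize> = store[i]
                .backlinks
                .iter()
                .copied()
                .filter(|j| !visited.contains(j))
                .collect();
            stack.extend(next);
        }
    }

    fn main() {
        // 0 <- 1 <- 2: dirtying node 0 must also dirty nodes 1 and 2.
        let mut store = vec![
            Node { cached: Some("v0".into()), backlinks: vec![1] },
            Node { cached: Some("v1".into()), backlinks: vec![2] },
            Node { cached: Some("v2".into()), backlinks: vec![] },
        ];
        propagate_dirty(&mut store, vec![0]);
        assert!(store.iter().all(|n| n.cached.is_none()));
    }
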
33 changes: 31 additions & 2 deletions src/eval/cache/lazy.rs
@@ -4,8 +4,11 @@ use crate::{
     identifier::Ident,
     term::{record::FieldDeps, BindingType, RichTerm, Term},
 };
-use std::cell::{Ref, RefCell, RefMut};
 use std::rc::{Rc, Weak};
+use std::{
+    cell::{Ref, RefCell, RefMut},
+    collections::HashMap,
+};

 /// The state of a thunk.
 ///
@@ -355,7 +358,7 @@ impl ThunkData
 /// inside a record may be invalidated by merging, and thus need to store the unaltered original
 /// expression. Those aspects are handled and discussed in more detail in
 /// [InnerThunkData].
-#[derive(Clone, Debug, PartialEq)]
+#[derive(Clone, Debug)]
 pub struct Thunk {
     data: Rc<RefCell<ThunkData>>,
     ident_kind: IdentKind,
@@ -560,6 +563,21 @@ impl Thunk
         self.data.borrow().deps()
     }
 }
+
+impl PartialEq for Thunk {
+    fn eq(&self, other: &Self) -> bool {
+        self.data.as_ptr() == other.data.as_ptr() && self.ident_kind == other.ident_kind
+    }
+}
+
+impl Eq for Thunk {}
+
+impl std::hash::Hash for Thunk {
+    fn hash<H: std::hash::Hasher>(&self, state: &mut H) {
+        let raw_ptr = self.data.as_ptr();
+        (self.ident_kind, raw_ptr).hash(state)
+    }
+}

 /// A thunk update frame.
 ///
@@ -692,4 +710,15 @@ impl Cache for CBNCache
     ) -> Result<Self::UpdateIndex, BlackholedError> {
         idx.mk_update_frame()
     }
+
+    fn smart_clone(
+        &mut self,
+        v: Vec<CacheIndex>,
+    ) -> HashMap<CacheIndex, CacheIndex> {
+        v.into_iter()
+            .map(|idx| (idx.clone(), self.revert(&idx)))
+            .collect()
+    }
+
+    fn propagate_dirty(&mut self, _indices: Vec<CacheIndex>) {}
 }
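
Replacing the derived `PartialEq` with the hand-written `PartialEq`/`Eq`/`Hash` impls above makes two `Thunk`s compare equal only when they share the same underlying `Rc` allocation, which is what lets thunks serve as `HashMap` keys (as the new `smart_clone` return type requires). A small illustration of pointer-identity equality, using a stand-in `Handle` type rather than the real `Thunk`:

    use std::cell::RefCell;
    use std::collections::HashMap;
    use std::rc::Rc;

    #[derive(Clone)]
    struct Handle(Rc<RefCell<i32>>);

    impl PartialEq for Handle {
        // Equal iff both point at the same allocation, not the same value.
        fn eq(&self, other: &Self) -> bool {
            Rc::ptr_eq(&self.0, &other.0)
        }
    }

    impl Eq for Handle {}

    impl std::hash::Hash for Handle {
        fn hash<H: std::hash::Hasher>(&self, state: &mut H) {
            Rc::as_ptr(&self.0).hash(state)
        }
    }

    fn main() {
        let a = Handle(Rc::new(RefCell::new(1)));
        let b = a.clone(); // same allocation: equal
        let c = Handle(Rc::new(RefCell::new(1))); // same value, new allocation: not equal
        assert!(a == b && a != c);

        let mut m = HashMap::new();
        m.insert(a, "original");
        assert_eq!(m.get(&b), Some(&"original"));
    }
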
6 changes: 6 additions & 0 deletions src/eval/cache/mod.rs
@@ -1,3 +1,5 @@
+use std::collections::HashMap;
+
 /// The Nickel generic evaluation cache. This module abstracts away the details for managing
 /// suspended computations and their memoization strategies.
 ///
@@ -94,4 +96,8 @@ pub trait Cache: Clone {
         &mut self,
         idx: &mut CacheIndex,
     ) -> Result<Self::UpdateIndex, BlackholedError>;
+
+    fn smart_clone(&mut self, v: Vec<CacheIndex>) -> HashMap<CacheIndex, CacheIndex>;
+
+    fn propagate_dirty(&mut self, indices: Vec<CacheIndex>);
 }
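
The two new trait methods form a clone-then-invalidate pair: `smart_clone` duplicates a set of cache entries and returns the old-to-new index mapping, while `propagate_dirty` transitively invalidates entries. A hypothetical call site (the `merge_step` helper below is illustrative only, not part of this commit, and assumes the surrounding module's `Cache` and `CacheIndex`):

    use std::collections::HashMap;

    // Sketch: clone the cache nodes backing a record's fields, then mark the
    // originals' dependents dirty so they get re-evaluated on next access.
    fn merge_step<C: Cache>(cache: &mut C, field_indices: Vec<CacheIndex>) {
        let mapping: HashMap<CacheIndex, CacheIndex> = cache.smart_clone(field_indices.clone());
        // ... rewire the merged record to use `mapping[&old_idx]` instead of `old_idx` ...
        cache.propagate_dirty(field_indices);
    }
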
21 changes: 14 additions & 7 deletions src/eval/fixpoint.rs
@@ -1,6 +1,8 @@
 //! Compute the fixpoint of a recursive record.
-use super::{merge::RevertClosurize, *};
-use crate::{label::Label, position::TermPos};
+use std::collections::HashSet;
+
+use super::{merge::{field_deps, RevertClosurize}, *};
+use crate::{label::Label, position::TermPos, term::record::FieldDeps};

 // Update the environment of a term by extending it with a recursive environment. In the general
 // case, the term is expected to be a variable pointing to the element to be patched. Otherwise, it's
@@ -83,7 +85,7 @@ pub fn rec_env<'a, I: Iterator<Item = (&'a Ident, &'a Field)>, C: Cache>(
         // so we start from the environment of the original record.
         let mut final_env = env.clone();
         let id_value = Ident::fresh();
-        final_env.insert(id_value, idx);
+        final_env.insert(id_value, idx.clone());

         let with_ctr_applied = PendingContract::apply_all(
             RichTerm::new(Term::Var(id_value), value.pos),
@@ -131,10 +133,15 @@ pub fn rec_env<'a, I: Iterator<Item = (&'a Ident, &'a Field)>, C: Cache>(
                 env: final_env,
             };

-            Ok((
-                *id,
-                cache.add(final_closure, IdentKind::Record, BindingType::Normal),
-            ))
+            let deps = FieldDeps::from(HashSet::from([*id]));
+            let mut new_idx = cache.add(
+                final_closure,
+                IdentKind::Record,
+                BindingType::Revertible(deps),
+            );
+            cache.build_cached(&mut new_idx, &[(*id, idx)]);
+
+            Ok((*id, new_idx))
         } else {
             let error = EvalError::MissingFieldDef {
                 id: *id,
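
With this change, each recursive-environment entry is no longer a plain `BindingType::Normal` node, frozen once evaluated, but a revertible node whose declared dependency set is the field itself, wired back to the original index via `build_cached`, so incremental merging can dirty and rebuild it. A rough model of what revertibility buys (stand-in types, not Nickel's actual cache):

    // A revertible node keeps the original, unevaluated expression next to the
    // memoized result, so invalidation can drop the value and re-evaluate later.
    #[derive(Clone)]
    struct RevNode {
        orig: String,           // stand-in for the original Closure
        cached: Option<String>, // memoized evaluation result
    }

    impl RevNode {
        fn force(&mut self) -> &str {
            if self.cached.is_none() {
                // Pretend "evaluation" just tags the original expression.
                self.cached = Some(format!("eval({})", self.orig));
            }
            self.cached.as_deref().unwrap()
        }

        fn revert(&mut self) {
            // Next `force` re-evaluates `orig`, e.g. against a merged record.
            self.cached = None;
        }
    }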