Skip to content

Commit

Permalink
Incremental Merging
Browse files Browse the repository at this point in the history
  • Loading branch information
dpl0a committed Apr 3, 2023
1 parent 120b363 commit 152993b
Show file tree
Hide file tree
Showing 6 changed files with 331 additions and 65 deletions.
189 changes: 168 additions & 21 deletions src/eval/cache/incremental.rs
Original file line number Diff line number Diff line change
@@ -1,4 +1,5 @@
/// A [Cache] implementation with incremental computation features.
//! A [Cache] implementation with incremental computation features.
use std::collections::{HashMap, HashSet};

use super::{BlackholedError, Cache, CacheIndex, Closure, Environment, IdentKind};
use crate::{
Expand All @@ -15,6 +16,12 @@ pub enum IncNodeState {
Evaluated,
}

/// A directed dependency edge in the incremental computation graph: the
/// identifier `id` resolves to the cache entry stored at `idx`.
#[derive(Debug, Copy, Clone, Eq, PartialEq)]
pub struct DependencyLink {
// Identifier through which the dependency is referenced.
id: Ident,
// Cache slot of the node this link points to.
idx: CacheIndex,
}

/// A node in the dependent computation graph stored in [IncCache].
#[derive(Debug, Clone)]
pub struct IncNode {
Expand All @@ -26,8 +33,10 @@ pub struct IncNode {
bty: BindingType,
// The state of the node.
state: IncNodeState,
// Forward links to dependencies.
fwdlinks: Vec<DependencyLink>,
// Backlinks to nodes depending on this node.
backlinks: Vec<CacheIndex>,
backlinks: Vec<DependencyLink>,
}

impl IncNode {
Expand All @@ -38,6 +47,7 @@ impl IncNode {
kind,
bty,
state: IncNodeState::default(),
fwdlinks: Vec::new(),
backlinks: Vec::new(),
}
}
Expand All @@ -60,7 +70,7 @@ impl IncCache {
idx
}

fn revnode_as_explicit_fun<'a, I>(node: &IncNode, args: I) -> IncNode
fn revnode_as_explicit_fun<'a, I>(node: &mut IncNode, args: I)
where
I: DoubleEndedIterator<Item = &'a Ident>,
{
Expand All @@ -75,18 +85,141 @@ impl IncCache {
let as_function =
args.rfold(body, |built, id| RichTerm::from(Term::Fun(*id, built)));

IncNode::new(
Closure {
body: as_function,
env,
},
node.kind,
node.bty.clone(),
)
node.orig = Closure {
body: as_function,
env,
}
}
_ => node.clone(),
_ => (),
}
}

/// Registers `idx` as a dependent of every node it references through its
/// forward links.
///
/// For each forward dependency `id -> target` of the node at `idx`, pushes a
/// mirroring [DependencyLink] into the target's `backlinks`, so that dirtiness
/// can later be propagated from the target back to `idx`.
fn update_backlinks(&mut self, idx: CacheIndex) {
    // Clone only the link list, not the whole node: cloning the node would
    // needlessly copy the original and cached closures as well.
    let fwdlinks = self.store.get(idx).unwrap().fwdlinks.clone();
    for link in fwdlinks {
        let target = self.store.get_mut(link.idx).unwrap();
        target.backlinks.push(DependencyLink { id: link.id, idx });
    }
}

/// Invalidates the node at `idx` and every node transitively depending on it.
///
/// The node and all its transitive dependents have their cached closure
/// dropped and their state reset to [IncNodeState::Suspended], forcing
/// re-evaluation on next access. A `visited` set guards against cycles and
/// duplicate work.
fn propagate_dirty(&mut self, idx: CacheIndex) {
    // Invalidate the starting node itself. (No `mut` binding needed: we
    // mutate through the `&mut` reference.)
    let node = self.store.get_mut(idx).unwrap();
    node.cached = None;
    node.state = IncNodeState::Suspended;

    let mut visited = HashSet::new();
    let mut stack = node.backlinks.clone();

    visited.insert(idx);

    // Depth-first walk over the reverse-dependency graph.
    while let Some(link) = stack.pop() {
        visited.insert(link.idx);
        let current = self.store.get_mut(link.idx).unwrap();
        current.cached = None;
        current.state = IncNodeState::Suspended;
        stack.extend(
            current
                .backlinks
                .iter()
                .filter(|l| !visited.contains(&l.idx)),
        )
    }
}

/// Invalidates every node in `indices` and all their transitive dependents.
///
/// Batch variant of dirtiness propagation: each reached node has its cached
/// closure dropped and its state reset to [IncNodeState::Suspended]. A
/// `visited` set guards against cycles and duplicate work.
fn propagate_dirty_vec(&mut self, indices: Vec<CacheIndex>) {
    let mut visited = HashSet::new();
    // The input vector doubles as the work stack.
    let mut stack = indices;

    while let Some(i) = stack.pop() {
        visited.insert(i);
        let current = self.store.get_mut(i).unwrap();
        current.cached = None;
        current.state = IncNodeState::Suspended;
        // NOTE: the stray debug `println!` that was here has been removed.
        stack.extend(
            current
                .backlinks
                .iter()
                .map(|l| l.idx)
                .filter(|idx| !visited.contains(idx)),
        )
    }
}

/* Do we need this when we can revert in place?
fn propagate_revert(&mut self, id: Ident, idx: CacheIndex) -> HashMap<Ident, CacheIndex> {
let mut nodes_reverted = HashMap::new();
let mut visited = HashSet::new();
let mut stack = vec![idx];
while !stack.is_empty() {
let i = stack.pop().unwrap();
visited.insert(i);
let idx_reverted = self.revert(&idx);
//FIXME: use the actual node's id
let node_id = Ident::from("TODO!");
nodes_reverted.insert(node_id, idx_reverted);
let current_node = self.store.get(i).unwrap();
stack.extend(
current_node
.backlinks
.iter()
.map(|x| x.idx)
.filter(|x| !visited.contains(x)),
)
}
nodes_reverted
} */

/// Clones every node in `v` into a fresh cache slot and returns the mapping
/// from old to new indices.
///
/// Links are rewired so the clones form a graph parallel to the originals:
/// - back/forward links between two cloned nodes are remapped to point at the
///   corresponding clones;
/// - a forward link to a node *outside* the cloned set is kept as-is, and that
///   external target additionally receives a backlink to the new clone.
///
/// NOTE(review): backlinks pointing outside the cloned set are left untouched,
/// so external dependents keep referring to the originals — presumably
/// intentional; confirm against the merging logic.
fn smart_clone(&mut self, v: Vec<CacheIndex>) -> HashMap<CacheIndex, CacheIndex> {
    let mut new_indices = HashMap::new();

    // First pass: allocate a fresh slot for each node to be cloned.
    for i in v.iter() {
        let current_node = self.store.get(*i).unwrap().clone();
        new_indices.insert(*i, self.add_node(current_node));
    }

    // Second pass: rewire the links of every clone.
    for i in new_indices.values() {
        let current_node = self.store.get_mut(*i).unwrap();

        // Backlinks into the cloned set are redirected to the clones.
        for dep in current_node.backlinks.iter_mut() {
            if let Some(idx) = new_indices.get(&dep.idx) {
                dep.idx = *idx;
            }
        }

        // Forward links into the cloned set are remapped likewise; links to
        // external nodes are collected so the targets can learn about their
        // new dependent below. `DependencyLink` is `Copy`, so no cloning.
        let mut to_be_updated = vec![];

        for dep in current_node.fwdlinks.iter_mut() {
            if let Some(idx) = new_indices.get(&dep.idx) {
                dep.idx = *idx;
            } else {
                to_be_updated.push(*dep);
            }
        }

        for dep in to_be_updated {
            let target_node = self.store.get_mut(dep.idx).unwrap();
            target_node.backlinks.push(DependencyLink {
                id: dep.id,
                idx: *i,
            });
        }
    }

    new_indices
}
}

impl Cache for IncCache {
Expand Down Expand Up @@ -204,9 +337,12 @@ impl Cache for IncCache {
kind: node.kind,
bty: node.bty.clone(),
state: node.state,
fwdlinks: node.fwdlinks.clone(),
backlinks: node.backlinks.clone(),
};

// TODO: Should this push the dependencies?

self.add_node(new_node)
}

Expand All @@ -224,13 +360,19 @@ impl Cache for IncCache {
BindingType::Revertible(ref deps) => match deps {
FieldDeps::Unknown => new_cached.env.extend(rec_env.iter().cloned()),
FieldDeps::Known(deps) if deps.is_empty() => (),
FieldDeps::Known(deps) => new_cached
.env
.extend(rec_env.iter().filter(|(id, _)| deps.contains(id)).cloned()),
FieldDeps::Known(deps) => {
let deps = rec_env.iter().filter(|(id, _)| deps.contains(id)).cloned();
node.fwdlinks = deps
.clone()
.map(|(id, idx)| DependencyLink { id, idx })
.collect();
new_cached.env.extend(deps);
}
},
}

node.cached = Some(new_cached);
self.update_backlinks(*idx);
}

fn saturate<'a, I: DoubleEndedIterator<Item = &'a Ident> + Clone>(
Expand All @@ -239,7 +381,7 @@ impl Cache for IncCache {
env: &mut Environment,
fields: I,
) -> RichTerm {
let node = self.store.get(idx).unwrap();
let node = self.store.get_mut(idx).unwrap();

let mut deps_filter: Box<dyn FnMut(&&Ident) -> bool> = match node.bty.clone() {
BindingType::Revertible(FieldDeps::Known(deps)) => {
Expand All @@ -249,13 +391,10 @@ impl Cache for IncCache {
BindingType::Normal => Box::new(|_: &&Ident| false),
};

let node_as_function = self.add_node(IncCache::revnode_as_explicit_fun(
node,
fields.clone().filter(&mut deps_filter),
));
IncCache::revnode_as_explicit_fun(node, fields.clone().filter(&mut deps_filter));

let fresh_var = Ident::fresh();
env.insert(fresh_var, node_as_function);
env.insert(fresh_var, idx);

let as_function_closurized = RichTerm::from(Term::Var(fresh_var));
let args = fields.filter_map(|id| deps_filter(&id).then(|| RichTerm::from(Term::Var(*id))));
Expand All @@ -264,4 +403,12 @@ impl Cache for IncCache {
RichTerm::from(Term::App(partial_app, arg))
})
}

/// [Cache] interface entry point; delegates to the inherent
/// `IncCache::smart_clone`, which rewires dependency links of the clones.
fn smart_clone(&mut self, v: Vec<CacheIndex>) -> HashMap<CacheIndex, CacheIndex> {
self.smart_clone(v)
}

/// Marks `indices` and all their transitive dependents as dirty, forcing
/// re-evaluation on next access. Delegates to `propagate_dirty_vec`.
fn propagate_dirty(&mut self, indices: Vec<CacheIndex>) {
self.propagate_dirty_vec(indices);
}
}
33 changes: 31 additions & 2 deletions src/eval/cache/lazy.rs
Original file line number Diff line number Diff line change
Expand Up @@ -4,8 +4,11 @@ use crate::{
identifier::Ident,
term::{record::FieldDeps, BindingType, RichTerm, Term},
};
use std::cell::{Ref, RefCell, RefMut};
use std::rc::{Rc, Weak};
use std::{
cell::{Ref, RefCell, RefMut},
collections::HashMap,
};

/// The state of a thunk.
///
Expand Down Expand Up @@ -355,7 +358,7 @@ impl ThunkData {
/// inside a record may be invalidated by merging, and thus need to store the unaltered original
/// expression. Those aspects are handled and discussed in more detail in
/// [InnerThunkData].
#[derive(Clone, Debug, PartialEq)]
#[derive(Clone, Debug)]
pub struct Thunk {
data: Rc<RefCell<ThunkData>>,
ident_kind: IdentKind,
Expand Down Expand Up @@ -560,6 +563,21 @@ impl Thunk {
self.data.borrow().deps()
}
}

/// Identity-based equality: two [Thunk] handles are equal iff they share the
/// same underlying [ThunkData] allocation *and* the same identifier kind.
/// Note that `#[derive(PartialEq)]` is deliberately avoided: comparing the
/// inner data structurally is not the intended semantics here.
impl PartialEq for Thunk {
    fn eq(&self, other: &Self) -> bool {
        let same_data = std::ptr::eq(self.data.as_ptr(), other.data.as_ptr());
        same_data && self.ident_kind == other.ident_kind
    }
}

impl Eq for Thunk {}

/// Hashing mirrors equality: the hash is derived from the identifier kind and
/// the address of the shared data, keeping `Eq`/`Hash` consistent (equal
/// thunks hash identically).
impl std::hash::Hash for Thunk {
    fn hash<H: std::hash::Hasher>(&self, state: &mut H) {
        // Field order matches the original tuple hash: kind first, then ptr.
        self.ident_kind.hash(state);
        self.data.as_ptr().hash(state);
    }
}
/// A thunk update frame.
///
/// A thunk update frame is put on the stack whenever a variable is entered, such that once this
Expand Down Expand Up @@ -692,4 +710,15 @@ impl Cache for CBNCache {
) -> Result<Self::UpdateIndex, BlackholedError> {
idx.mk_update_frame()
}

fn smart_clone(
&mut self,
v: Vec<CacheIndex>,
) -> std::collections::HashMap<CacheIndex, CacheIndex> {
v.into_iter()
.map(|idx| (idx.clone(), self.revert(&idx)))
.collect()
}

/// No-op: the call-by-need cache does not track dependencies, so there is
/// nothing to invalidate. The parameter is underscored to silence the
/// unused-variable warning.
fn propagate_dirty(&mut self, _indices: Vec<CacheIndex>) {}
}
12 changes: 9 additions & 3 deletions src/eval/cache/mod.rs
Original file line number Diff line number Diff line change
@@ -1,3 +1,5 @@
use std::collections::HashMap;

/// The Nickel generic evaluation cache. This module abstracts away the details for managing
/// suspended computations and their memoization strategies.
///
Expand All @@ -11,15 +13,15 @@ use crate::{
};

pub mod lazy;
//pub mod incremental;
// pub mod incremental;

/// An index to a specific item stored in the cache
pub type CacheIndex = lazy::Thunk;
//pub type CacheIndex = usize;
// pub type CacheIndex = usize;

/// The current Cache implementation
pub type CacheImpl = lazy::CBNCache;
//pub type CacheImpl = incremental::IncCache;
// pub type CacheImpl = incremental::IncCache;

/// A black-holed node was accessed, which would lead to infinite recursion.
#[derive(Clone, Copy, Debug, Eq, PartialEq)]
Expand Down Expand Up @@ -94,4 +96,8 @@ pub trait Cache: Clone {
&mut self,
idx: &mut CacheIndex,
) -> Result<Self::UpdateIndex, BlackholedError>;

/// Clones each given cache entry, returning a map from original index to its
/// clone. Implementations may fix up inter-entry dependency links (the
/// incremental cache) or simply revert each entry (the CBN cache).
fn smart_clone(&mut self, v: Vec<CacheIndex>) -> HashMap<CacheIndex, CacheIndex>;

/// Marks the given entries as dirty; caches that track dependencies also
/// invalidate all transitive dependents. May be a no-op for caches that do
/// not memoize across updates (e.g. the CBN cache).
fn propagate_dirty(&mut self, indices: Vec<CacheIndex>);
}
Loading

0 comments on commit 152993b

Please sign in to comment.