
Commit

Merge branch 'master' into google_storage_experimental
tsahee authored Oct 9, 2024
2 parents 1d643fd + 6eee30d commit 9789718
Showing 41 changed files with 1,158 additions and 1,121 deletions.
86 changes: 73 additions & 13 deletions arbitrator/stylus/src/cache.rs
@@ -29,8 +29,16 @@ pub struct LruCounters {
pub does_not_fit: u32,
}

pub struct LongTermCounters {
pub hits: u32,
pub misses: u32,
}

pub struct InitCache {
long_term: HashMap<CacheKey, CacheItem>,
long_term_size_bytes: usize,
long_term_counters: LongTermCounters,

lru: CLruCache<CacheKey, CacheItem, RandomState, CustomWeightScale>,
lru_counters: LruCounters,
}
@@ -91,6 +99,20 @@ pub struct LruCacheMetrics {
pub does_not_fit: u32,
}

#[repr(C)]
pub struct LongTermCacheMetrics {
pub size_bytes: u64,
pub count: u32,
pub hits: u32,
pub misses: u32,
}

#[repr(C)]
pub struct CacheMetrics {
pub lru: LruCacheMetrics,
pub long_term: LongTermCacheMetrics,
}

pub fn deserialize_module(
module: &[u8],
version: u16,
@@ -117,6 +139,9 @@ impl InitCache {
fn new(size_bytes: usize) -> Self {
Self {
long_term: HashMap::new(),
long_term_size_bytes: 0,
long_term_counters: LongTermCounters { hits: 0, misses: 0 },

lru: CLruCache::with_config(
CLruCacheConfig::new(NonZeroUsize::new(size_bytes).unwrap())
.with_scale(CustomWeightScale),
@@ -136,22 +161,41 @@ impl InitCache {
}

/// Retrieves a cached value, updating items as necessary.
pub fn get(module_hash: Bytes32, version: u16, debug: bool) -> Option<(Module, Store)> {
let mut cache = cache!();
/// If long_term_tag is 1 and the item is only in the LRU cache, it is also inserted into the long term cache.
pub fn get(
module_hash: Bytes32,
version: u16,
long_term_tag: u32,
debug: bool,
) -> Option<(Module, Store)> {
let key = CacheKey::new(module_hash, version, debug);
let mut cache = cache!();

// See if the item is in the long term cache
if let Some(item) = cache.long_term.get(&key) {
return Some(item.data());
let data = item.data();
cache.long_term_counters.hits += 1;
return Some(data);
}
if long_term_tag == Self::ARBOS_TAG {
// only count misses when we can expect to find the item in the long term cache
cache.long_term_counters.misses += 1;
}

// See if the item is in the LRU cache, promoting if so
if let Some(item) = cache.lru.get(&key) {
let data = item.data();
if let Some(item) = cache.lru.peek(&key).cloned() {
cache.lru_counters.hits += 1;
return Some(data);
if long_term_tag == Self::ARBOS_TAG {
cache.long_term_size_bytes += item.entry_size_estimate_bytes;
cache.long_term.insert(key, item.clone());
} else {
// only calls get to move the key to the head of the LRU list
cache.lru.get(&key);
}
return Some((item.module, Store::new(item.engine)));
}
cache.lru_counters.misses += 1;

None
}

@@ -174,6 +218,7 @@ impl InitCache {
if let Some(item) = cache.lru.peek(&key).cloned() {
if long_term_tag == Self::ARBOS_TAG {
cache.long_term.insert(key, item.clone());
cache.long_term_size_bytes += item.entry_size_estimate_bytes;
} else {
// only calls get to move the key to the head of the LRU list
cache.lru.get(&key);
@@ -195,6 +240,7 @@ impl InitCache {
};
} else {
cache.long_term.insert(key, item);
cache.long_term_size_bytes += entry_size_estimate_bytes;
}
Ok(data)
}
@@ -207,6 +253,7 @@ impl InitCache {
let key = CacheKey::new(module_hash, version, debug);
let mut cache = cache!();
if let Some(item) = cache.long_term.remove(&key) {
cache.long_term_size_bytes -= item.entry_size_estimate_bytes;
if cache.lru.put_with_weight(key, item).is_err() {
eprintln!("{}", Self::DOES_NOT_FIT_MSG);
}
@@ -225,23 +272,32 @@ impl InitCache {
eprintln!("{}", Self::DOES_NOT_FIT_MSG);
}
}
cache.long_term_size_bytes = 0;
}

pub fn get_lru_metrics() -> LruCacheMetrics {
pub fn get_metrics() -> CacheMetrics {
let mut cache = cache!();

let count = cache.lru.len();
let metrics = LruCacheMetrics {
// add 1 to each entry to account that we subtracted 1 in the weight calculation
size_bytes: (cache.lru.weight() + count).try_into().unwrap(),
let lru_count = cache.lru.len();
let lru_metrics = LruCacheMetrics {
// adds 1 to each entry to account for the 1 we subtracted in the weight calculation
size_bytes: (cache.lru.weight() + lru_count).try_into().unwrap(),

count: count.try_into().unwrap(),
count: lru_count.try_into().unwrap(),

hits: cache.lru_counters.hits,
misses: cache.lru_counters.misses,
does_not_fit: cache.lru_counters.does_not_fit,
};

let long_term_metrics = LongTermCacheMetrics {
size_bytes: cache.long_term_size_bytes.try_into().unwrap(),
count: cache.long_term.len().try_into().unwrap(),

hits: cache.long_term_counters.hits,
misses: cache.long_term_counters.misses,
};

// Empty the counters.
// The Go side, which is the only consumer of this function besides tests,
// will read these counters and increment its own Prometheus counters with them.
@@ -250,8 +306,12 @@ impl InitCache {
misses: 0,
does_not_fit: 0,
};
cache.long_term_counters = LongTermCounters { hits: 0, misses: 0 };

metrics
CacheMetrics {
lru: lru_metrics,
long_term: long_term_metrics,
}
}

// only used for testing
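
A note on consumption: get_metrics zeroes the hit/miss counters on every call, so the Go side (its only consumer besides tests) is expected to fold each snapshot into its own metrics. Below is a minimal sketch of such a consumer, assuming a cgo wrapper that copies the #[repr(C)] structs into the Go mirrors shown; the struct fields follow the Rust definitions above, but the wrapper, package, and metric names are illustrative rather than the actual binding in arbos/programs.

package programs

import "github.com/ethereum/go-ethereum/metrics"

// Go mirrors of the #[repr(C)] structs returned by stylus_get_cache_metrics.
// Field layout follows the Rust definitions; the real cgo binding may differ.
type LruCacheMetrics struct {
	SizeBytes  uint64
	Count      uint32
	Hits       uint32
	Misses     uint32
	DoesNotFit uint32
}

type LongTermCacheMetrics struct {
	SizeBytes uint64
	Count     uint32
	Hits      uint32
	Misses    uint32
}

type CacheMetrics struct {
	Lru      LruCacheMetrics
	LongTerm LongTermCacheMetrics
}

// Metric names below are placeholders.
var (
	lruSizeGauge      = metrics.NewRegisteredGauge("arb/wasm/cache/lru/size_bytes", nil)
	lruHits           = metrics.NewRegisteredCounter("arb/wasm/cache/lru/hits", nil)
	lruMisses         = metrics.NewRegisteredCounter("arb/wasm/cache/lru/misses", nil)
	longTermSizeGauge = metrics.NewRegisteredGauge("arb/wasm/cache/long_term/size_bytes", nil)
	longTermHits      = metrics.NewRegisteredCounter("arb/wasm/cache/long_term/hits", nil)
	longTermMisses    = metrics.NewRegisteredCounter("arb/wasm/cache/long_term/misses", nil)
)

// updateWasmCacheMetrics takes one snapshot from the Rust side and folds the
// since-last-call hit/miss deltas into the node's metrics. Because the Rust
// side resets its counters on each call, Inc (not Update) is the right
// operation for hits and misses; sizes and counts are point-in-time values,
// so they go to gauges.
func updateWasmCacheMetrics(getCacheMetrics func() CacheMetrics) {
	m := getCacheMetrics() // e.g. a cgo call into stylus_get_cache_metrics
	lruSizeGauge.Update(int64(m.Lru.SizeBytes))
	lruHits.Inc(int64(m.Lru.Hits))
	lruMisses.Inc(int64(m.Lru.Misses))
	longTermSizeGauge.Update(int64(m.LongTerm.SizeBytes))
	longTermHits.Inc(int64(m.LongTerm.Hits))
	longTermMisses.Inc(int64(m.LongTerm.Misses))
}
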
23 changes: 14 additions & 9 deletions arbitrator/stylus/src/lib.rs
@@ -11,7 +11,7 @@ use arbutil::{
format::DebugBytes,
Bytes32,
};
use cache::{deserialize_module, InitCache, LruCacheMetrics};
use cache::{deserialize_module, CacheMetrics, InitCache};
use evm_api::NativeRequestHandler;
use eyre::ErrReport;
use native::NativeInstance;
@@ -364,10 +364,10 @@ pub unsafe extern "C" fn stylus_drop_vec(vec: RustBytes) {
}
}

/// Gets lru cache metrics.
/// Gets cache metrics.
#[no_mangle]
pub extern "C" fn stylus_get_lru_cache_metrics() -> LruCacheMetrics {
InitCache::get_lru_metrics()
pub extern "C" fn stylus_get_cache_metrics() -> CacheMetrics {
InitCache::get_metrics()
}

/// Clears lru cache.
@@ -377,18 +377,23 @@ pub extern "C" fn stylus_clear_lru_cache() {
InitCache::clear_lru_cache()
}

/// Gets lru entry size in bytes.
/// Clears long term cache (for arbos_tag = 1)
/// Only used for testing purposes.
#[no_mangle]
pub extern "C" fn stylus_get_lru_entry_size_estimate_bytes(
pub extern "C" fn stylus_clear_long_term_cache() {
InitCache::clear_long_term(1);
}

/// Gets entry size in bytes.
/// Only used for testing purposes.
#[no_mangle]
pub extern "C" fn stylus_get_entry_size_estimate_bytes(
module: GoSliceData,
version: u16,
debug: bool,
) -> u64 {
match deserialize_module(module.slice(), version, debug) {
Err(error) => panic!("tried to get invalid asm!: {error}"),
Ok((_, _, lru_entry_size_estimate_bytes)) => {
lru_entry_size_estimate_bytes.try_into().unwrap()
}
Ok((_, _, entry_size_estimate_bytes)) => entry_size_estimate_bytes.try_into().unwrap(),
}
}
7 changes: 3 additions & 4 deletions arbitrator/stylus/src/native.rs
@@ -121,13 +121,12 @@ impl<D: DataReader, E: EvmApi<D>> NativeInstance<D, E> {
let compile = CompileConfig::version(version, debug);
let env = WasmEnv::new(compile, None, evm, evm_data);
let module_hash = env.evm_data.module_hash;

if let Some((module, store)) = InitCache::get(module_hash, version, debug) {
return Self::from_module(module, store, env);
}
if !env.evm_data.cached {
long_term_tag = 0;
}
if let Some((module, store)) = InitCache::get(module_hash, version, long_term_tag, debug) {
return Self::from_module(module, store, env);
}
let (module, store) =
InitCache::insert(module_hash, module, version, long_term_tag, debug)?;
Self::from_module(module, store, env)
2 changes: 1 addition & 1 deletion arbnode/dataposter/data_poster.go
@@ -34,7 +34,6 @@ import (
"github.com/ethereum/go-ethereum/rlp"
"github.com/ethereum/go-ethereum/rpc"
"github.com/ethereum/go-ethereum/signer/core/apitypes"
"github.com/go-redis/redis/v8"
"github.com/holiman/uint256"
"github.com/offchainlabs/nitro/arbnode/dataposter/dbstorage"
"github.com/offchainlabs/nitro/arbnode/dataposter/noop"
@@ -46,6 +45,7 @@ import (
"github.com/offchainlabs/nitro/util/rpcclient"
"github.com/offchainlabs/nitro/util/signature"
"github.com/offchainlabs/nitro/util/stopwaiter"
"github.com/redis/go-redis/v9"
"github.com/spf13/pflag"

redisstorage "github.com/offchainlabs/nitro/arbnode/dataposter/redis"
4 changes: 2 additions & 2 deletions arbnode/dataposter/redis/redisstorage.go
@@ -9,9 +9,9 @@ import (
"errors"
"fmt"

"github.com/go-redis/redis/v8"
"github.com/offchainlabs/nitro/arbnode/dataposter/storage"
"github.com/offchainlabs/nitro/util/signature"
"github.com/redis/go-redis/v9"
)

// Storage implements redis sorted set backed storage. It does not support
@@ -196,7 +196,7 @@ func (s *Storage) Put(ctx context.Context, index uint64, prev, new *storage.Queu
if err != nil {
return err
}
if err := pipe.ZAdd(ctx, s.key, &redis.Z{
if err := pipe.ZAdd(ctx, s.key, redis.Z{
Score: float64(index),
Member: string(signedItem),
}).Err(); err != nil {
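
The Go files in this commit all carry the same dependency move from github.com/go-redis/redis/v8 to github.com/redis/go-redis/v9. Besides the new import path, v9's ZAdd takes redis.Z members by value instead of by pointer, which is what the hunk above reflects. A minimal standalone sketch of the v9 call shape (the function and its wiring are illustrative, not code from this repository):

package example

import (
	"context"

	"github.com/redis/go-redis/v9"
)

// addQueuedItem writes one member into a sorted set keyed by index.
// go-redis v8: ZAdd(ctx, key, &redis.Z{...})  // pointer members
// go-redis v9: ZAdd(ctx, key, redis.Z{...})   // value members
func addQueuedItem(ctx context.Context, client *redis.Client, key string, index uint64, payload []byte) error {
	return client.ZAdd(ctx, key, redis.Z{
		Score:  float64(index),
		Member: string(payload),
	}).Err()
}
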
2 changes: 1 addition & 1 deletion arbnode/redislock/redis.go
@@ -12,8 +12,8 @@ import (
"time"

"github.com/ethereum/go-ethereum/log"
"github.com/go-redis/redis/v8"
"github.com/offchainlabs/nitro/util/stopwaiter"
"github.com/redis/go-redis/v9"
flag "github.com/spf13/pflag"
)

2 changes: 1 addition & 1 deletion arbnode/seq_coordinator.go
@@ -14,7 +14,7 @@ import (
"sync/atomic"
"time"

"github.com/go-redis/redis/v8"
"github.com/redis/go-redis/v9"
flag "github.com/spf13/pflag"

"github.com/ethereum/go-ethereum/log"
5 changes: 4 additions & 1 deletion arbos/block_processor.go
@@ -144,6 +144,7 @@ func ProduceBlock(
chainContext core.ChainContext,
chainConfig *params.ChainConfig,
isMsgForPrefetch bool,
runMode core.MessageRunMode,
) (*types.Block, types.Receipts, error) {
txes, err := ParseL2Transactions(message, chainConfig.ChainID)
if err != nil {
@@ -153,7 +154,7 @@

hooks := NoopSequencingHooks()
return ProduceBlockAdvanced(
message.Header, txes, delayedMessagesRead, lastBlockHeader, statedb, chainContext, chainConfig, hooks, isMsgForPrefetch,
message.Header, txes, delayedMessagesRead, lastBlockHeader, statedb, chainContext, chainConfig, hooks, isMsgForPrefetch, runMode,
)
}

@@ -168,6 +169,7 @@ func ProduceBlockAdvanced(
chainConfig *params.ChainConfig,
sequencingHooks *SequencingHooks,
isMsgForPrefetch bool,
runMode core.MessageRunMode,
) (*types.Block, types.Receipts, error) {

state, err := arbosState.OpenSystemArbosState(statedb, nil, true)
@@ -318,6 +320,7 @@ func ProduceBlockAdvanced(
tx,
&header.GasUsed,
vm.Config{},
runMode,
func(result *core.ExecutionResult) error {
return hooks.PostTxFilter(header, state, tx, sender, dataGas, result)
},
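
ProduceBlock and ProduceBlockAdvanced now thread a core.MessageRunMode through to transaction execution, so every caller has to state how the message is being run. A sketch of the new call shape, assuming core.MessageCommitMode is the mode used for ordinary block production; the helper, its parameter types, and the constant name are inferred from context and may not match the repository exactly:

package example

import (
	"github.com/ethereum/go-ethereum/core"
	"github.com/ethereum/go-ethereum/core/state"
	"github.com/ethereum/go-ethereum/core/types"
	"github.com/ethereum/go-ethereum/params"
	"github.com/offchainlabs/nitro/arbos"
	"github.com/offchainlabs/nitro/arbos/arbostypes"
)

// produceCommittedBlock forwards to arbos.ProduceBlock with the new runMode
// argument. Everything except the final core.MessageCommitMode parameter is
// the pre-existing call shape; the wrapper itself is a placeholder for
// whatever the real caller already has in scope.
func produceCommittedBlock(
	message *arbostypes.L1IncomingMessage,
	delayedMessagesRead uint64,
	lastBlockHeader *types.Header,
	statedb *state.StateDB,
	chainContext core.ChainContext,
	chainConfig *params.ChainConfig,
) (*types.Block, types.Receipts, error) {
	return arbos.ProduceBlock(
		message,
		delayedMessagesRead,
		lastBlockHeader,
		statedb,
		chainContext,
		chainConfig,
		false,                  // isMsgForPrefetch
		core.MessageCommitMode, // runMode: the argument added by this commit
	)
}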