Jit prover should accept InputJSON format and execute a full block #2730

Merged
9 changes: 9 additions & 0 deletions .github/workflows/ci.yml
@@ -180,6 +180,15 @@ jobs:
        run: |
          make build-prover-bin
          target/bin/prover target/machines/latest/machine.wavm.br -b --json-inputs="${{ github.workspace }}/target/TestProgramStorage/block_inputs.json"

      - name: run jit prover on block input json
        if: matrix.test-mode == 'defaults'
        run: |
          make build-jit
          if [ -n "$(target/bin/jit --binary target/machines/latest/replay.wasm --cranelift --json-inputs='${{ github.workspace }}/target/TestProgramStorage/block_inputs.json')" ]; then
            echo "Error: Command produced output."
            exit 1
          fi

      - name: run challenge tests
        if: matrix.test-mode == 'challenge'
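The new CI step treats any stdout from the jit run as a failure: a full-block execution driven entirely by the JSON inputs is expected to finish silently. The same check can be reproduced outside CI; below is a rough sketch using std::process::Command, with the binary and input paths copied from the CI step above as placeholders (not a fixed interface).

use std::process::Command;

fn main() -> std::io::Result<()> {
    // Mirrors the CI step: run the jit binary against a recorded block_inputs.json.
    let output = Command::new("target/bin/jit")
        .args([
            "--binary",
            "target/machines/latest/replay.wasm",
            "--cranelift",
            "--json-inputs",
            "target/TestProgramStorage/block_inputs.json",
        ])
        .output()?;
    // As in the CI check, any stdout from a full-block run is treated as an error.
    assert!(
        output.stdout.is_empty(),
        "jit produced unexpected output: {}",
        String::from_utf8_lossy(&output.stdout)
    );
    Ok(())
}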
120 changes: 62 additions & 58 deletions arbitrator/jit/src/machine.rs
@@ -2,8 +2,8 @@
// For license information, see https://github.com/nitro/blob/master/LICENSE

use crate::{
    arbcompress, caller_env::GoRuntimeState, prepare::prepare_env, program, socket,
    stylus_backend::CothreadHandler, wasip1_stub, wavmio, Opts,
};
use arbutil::{Bytes32, Color, PreimageType};
use eyre::{bail, ErrReport, Result, WrapErr};
@@ -215,72 +215,76 @@ pub struct WasmEnv {

impl WasmEnv {
    pub fn cli(opts: &Opts) -> Result<Self> {
        if let Some(json_inputs) = opts.json_inputs.clone() {
            prepare_env(json_inputs, opts.debug)
        } else {
            let mut env = WasmEnv::default();
            env.process.forks = opts.forks;
            env.process.debug = opts.debug;

            let mut inbox_position = opts.inbox_position;
            let mut delayed_position = opts.delayed_inbox_position;

            for path in &opts.inbox {
                let mut msg = vec![];
                File::open(path)?.read_to_end(&mut msg)?;
                env.sequencer_messages.insert(inbox_position, msg);
                inbox_position += 1;
            }
            for path in &opts.delayed_inbox {
                let mut msg = vec![];
                File::open(path)?.read_to_end(&mut msg)?;
                env.delayed_messages.insert(delayed_position, msg);
                delayed_position += 1;
            }

            if let Some(path) = &opts.preimages {
                let mut file = BufReader::new(File::open(path)?);
                let mut preimages = Vec::new();
                let filename = path.to_string_lossy();
                loop {
                    let mut size_buf = [0u8; 8];
                    match file.read_exact(&mut size_buf) {
                        Ok(()) => {}
                        Err(err) if err.kind() == ErrorKind::UnexpectedEof => break,
                        Err(err) => bail!("Failed to parse {filename}: {}", err),
                    }
                    let size = u64::from_le_bytes(size_buf) as usize;
                    let mut buf = vec![0u8; size];
                    file.read_exact(&mut buf)?;
                    preimages.push(buf);
                }
                let keccak_preimages = env.preimages.entry(PreimageType::Keccak256).or_default();
                for preimage in preimages {
                    let mut hasher = Keccak256::new();
                    hasher.update(&preimage);
                    let hash = hasher.finalize().into();
                    keccak_preimages.insert(hash, preimage);
                }
            }

            fn parse_hex(arg: &Option<String>, name: &str) -> Result<Bytes32> {
                match arg {
                    Some(arg) => {
                        let mut arg = arg.as_str();
                        if arg.starts_with("0x") {
                            arg = &arg[2..];
                        }
                        let mut bytes32 = [0u8; 32];
                        hex::decode_to_slice(arg, &mut bytes32)
                            .wrap_err_with(|| format!("failed to parse {} contents", name))?;
                        Ok(bytes32.into())
                    }
                    None => Ok(Bytes32::default()),
                }
            }

            let last_block_hash = parse_hex(&opts.last_block_hash, "--last-block-hash")?;
            let last_send_root = parse_hex(&opts.last_send_root, "--last-send-root")?;
            env.small_globals = [opts.inbox_position, opts.position_within_message];
            env.large_globals = [last_block_hash, last_send_root];
            Ok(env)
        }
    }

pub fn send_results(&mut self, error: Option<String>, memory_used: Pages) {
5 changes: 5 additions & 0 deletions arbitrator/jit/src/main.rs
@@ -10,6 +10,7 @@ use structopt::StructOpt;
mod arbcompress;
mod caller_env;
mod machine;
mod prepare;
mod program;
mod socket;
mod stylus_backend;
@@ -46,6 +47,10 @@ pub struct Opts {
    debug: bool,
    #[structopt(long)]
    require_success: bool,
    // JSON inputs supersede any command-line inputs whose values could also be
    // specified in the JSON file.
    #[structopt(long)]
    json_inputs: Option<PathBuf>,
}

fn main() -> Result<()> {
73 changes: 73 additions & 0 deletions arbitrator/jit/src/prepare.rs
@@ -0,0 +1,73 @@
// Copyright 2022-2024, Offchain Labs, Inc.
// For license information, see https://github.com/nitro/blob/master/LICENSE

use crate::WasmEnv;
use arbutil::{Bytes32, PreimageType};
use eyre::Ok;
use prover::parse_input::FileData;
use std::env;
use std::fs::File;
use std::io::BufReader;
use std::path::PathBuf;

// local_target matches rawdb.LocalTarget() on the go side.
// While generating the json_inputs file, one should make sure the user_wasms map
// has an entry for the architecture of the system that jit validation runs on.
pub fn local_target() -> String {
    if env::consts::OS == "linux" {
        match env::consts::ARCH {
            "aarch64" => "arm64".to_string(),
            "x86_64" => "amd64".to_string(),
            _ => "host".to_string(),
        }
    } else {
        "host".to_string()
    }
}

pub fn prepare_env(json_inputs: PathBuf, debug: bool) -> eyre::Result<WasmEnv> {
    let file = File::open(json_inputs)?;
    let reader = BufReader::new(file);

    let data = FileData::from_reader(reader)?;

    let mut env = WasmEnv::default();
    env.process.forks = false; // Should be set to false when using json_inputs
    env.process.debug = debug;

    let block_hash: [u8; 32] = data.start_state.block_hash.try_into().unwrap();
    let block_hash: Bytes32 = block_hash.into();
    let send_root: [u8; 32] = data.start_state.send_root.try_into().unwrap();
    let send_root: Bytes32 = send_root.into();
    let bytes32_vals: [Bytes32; 2] = [block_hash, send_root];
    let u64_vals: [u64; 2] = [data.start_state.batch, data.start_state.pos_in_batch];
    env.small_globals = u64_vals;
    env.large_globals = bytes32_vals;

    for batch_info in data.batch_info.iter() {
        env.sequencer_messages
            .insert(batch_info.number, batch_info.data_b64.clone());
    }

    if data.delayed_msg_nr != 0 && !data.delayed_msg_b64.is_empty() {
        env.delayed_messages
            .insert(data.delayed_msg_nr, data.delayed_msg_b64.clone());
    }

    for (ty, inner_map) in data.preimages_b64 {
        let preimage_ty = PreimageType::try_from(ty as u8)?;
        let map = env.preimages.entry(preimage_ty).or_default();
        for (hash, preimage) in inner_map {
            map.insert(hash, preimage);
        }
    }

    if let Some(user_wasms) = data.user_wasms.get(&local_target()) {
        for (module_hash, module_asm) in user_wasms.iter() {
            env.module_asms
                .insert(*module_hash, module_asm.as_vec().into());
        }
    }

    Ok(env)
}
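local_target() mirrors the target names used on the Go side, so the user_wasms lookup above only succeeds if the recorded JSON contains module binaries for the host's architecture. A hypothetical sanity check of the mapping (not part of this PR), assuming the Go side reports "amd64"/"arm64" on linux and the generic "host" target elsewhere:

#[cfg(test)]
mod tests {
    use super::local_target;

    #[test]
    fn local_target_matches_go_side_naming() {
        // Expected values follow directly from the match arms in local_target().
        if cfg!(all(target_os = "linux", target_arch = "x86_64")) {
            assert_eq!(local_target(), "amd64");
        } else if cfg!(all(target_os = "linux", target_arch = "aarch64")) {
            assert_eq!(local_target(), "arm64");
        } else {
            assert_eq!(local_target(), "host");
        }
    }
}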
4 changes: 2 additions & 2 deletions staker/stateless_block_validator.go
@@ -511,12 +511,12 @@ func (v *StatelessBlockValidator) ValidateResult(
return true, &entry.End, nil
}

func (v *StatelessBlockValidator) ValidationInputsAt(ctx context.Context, pos arbutil.MessageIndex, target ethdb.WasmTarget) (server_api.InputJSON, error) {
func (v *StatelessBlockValidator) ValidationInputsAt(ctx context.Context, pos arbutil.MessageIndex, targets ...ethdb.WasmTarget) (server_api.InputJSON, error) {
entry, err := v.CreateReadyValidationEntry(ctx, pos)
if err != nil {
return server_api.InputJSON{}, err
}
input, err := entry.ToInput([]ethdb.WasmTarget{target})
input, err := entry.ToInput(targets)
if err != nil {
return server_api.InputJSON{}, err
}
4 changes: 2 additions & 2 deletions system_tests/common_test.go
@@ -1728,7 +1728,7 @@ var (
// recordBlock writes a json file with all of the data needed to validate a block.
//
// This can be used as an input to the arbitrator prover to validate a block.
func recordBlock(t *testing.T, block uint64, builder *NodeBuilder) {
func recordBlock(t *testing.T, block uint64, builder *NodeBuilder, targets ...ethdb.WasmTarget) {
t.Helper()
flag.Parse()
if !*recordBlockInputsEnable {
@@ -1759,7 +1759,7 @@
}
validationInputsWriter, err := inputs.NewWriter(options...)
Require(t, err)
inputJson, err := builder.L2.ConsensusNode.StatelessBlockValidator.ValidationInputsAt(ctx, inboxPos, rawdb.TargetWavm)
inputJson, err := builder.L2.ConsensusNode.StatelessBlockValidator.ValidationInputsAt(ctx, inboxPos, targets...)
if err != nil {
Fatal(t, "failed to get validation inputs", block, err)
}
2 changes: 1 addition & 1 deletion system_tests/program_test.go
@@ -425,7 +425,7 @@ func storageTest(t *testing.T, jit bool) {

// Captures a block_inputs json file for the block that included the storage
// write transaction, including the wasm targets needed by both the arbitrator prover and the jit binary.
recordBlock(t, receipt.BlockNumber.Uint64(), builder)
recordBlock(t, receipt.BlockNumber.Uint64(), builder, rawdb.TargetWavm, rawdb.LocalTarget())
}

func TestProgramTransientStorage(t *testing.T) {