Skip to content
New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.

Already on GitHub? Sign in to your account

[compiler-v2] Improved flush writes optimization #15718

Merged
merged 1 commit into from
Jan 29, 2025
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
Original file line number Diff line number Diff line change
Expand Up @@ -151,27 +151,24 @@ impl<'a> FunctionGenerator<'a> {
type_parameters: fun_env.get_type_parameters(),
def_idx,
});
if fun_gen.spec_blocks.is_empty() {
// Currently, peephole optimizations require that there are no inline spec blocks.
// This is to ensure that spec-related data structures do not refer to code
// offsets which could be changed by the peephole optimizer.
let options = ctx
.env
.get_extension::<Options>()
.expect("Options is available");
if options.experiment_on(Experiment::PEEPHOLE_OPTIMIZATION) {
let transformed_code_chunk = peephole_optimizer::optimize(&code.code);
// Fix the source map for the optimized code.
fun_gen
.gen
.source_map
.remap_code_map(def_idx, transformed_code_chunk.original_offsets)
.expect(SOURCE_MAP_OK);
// Replace the code with the optimized one.
code.code = transformed_code_chunk.code;
}
} else {
// Write the spec block table back to the environment.
let options = ctx
.env
.get_extension::<Options>()
.expect("Options is available");
if options.experiment_on(Experiment::PEEPHOLE_OPTIMIZATION) {
let transformed_code_chunk = peephole_optimizer::optimize(&code.code);
// Fix the source map for the optimized code.
fun_gen
.gen
.source_map
.remap_code_map(def_idx, &transformed_code_chunk.original_offsets)
.expect(SOURCE_MAP_OK);
// Replace the code with the optimized one.
code.code = transformed_code_chunk.code;
// Remap the spec blocks to the new code offsets.
fun_gen.remap_spec_blocks(&transformed_code_chunk.original_offsets);
}
if !fun_gen.spec_blocks.is_empty() {
fun_env.get_mut_spec().on_impl = fun_gen.spec_blocks;
}
(fun_gen.gen, Some(code))
Expand Down Expand Up @@ -913,6 +910,34 @@ impl<'a> FunctionGenerator<'a> {
self.emit(FF::Bytecode::Nop)
}

/// Rewrite the keys of `self.spec_blocks` after the peephole optimizer has
/// rearranged the code, given `new_to_original_offsets`: for each new code
/// offset `i`, `new_to_original_offsets[i]` is the original offset that the
/// instruction at `i` came from.
fn remap_spec_blocks(&mut self, new_to_original_offsets: &[CodeOffset]) {
    if new_to_original_offsets.is_empty() {
        // No code, nothing to remap.
        return;
    }
    // Invert the mapping: original offset -> new offset. A BTreeMap lets us
    // look up "the first surviving original offset at or after X" via `range`.
    let mut original_to_new = BTreeMap::new();
    for (new_offset, original_offset) in new_to_original_offsets.iter().enumerate() {
        original_to_new.insert(*original_offset, new_offset as CodeOffset);
    }
    // Fallback target for spec blocks whose original offset lies beyond every
    // surviving instruction: the last offset of the new code.
    // Safe: emptiness was ruled out above, so `len() - 1` cannot underflow.
    let last_new_offset = (new_to_original_offsets.len() - 1) as CodeOffset;

    // Rebuild the spec block table keyed by new offsets.
    let mut remapped = BTreeMap::new();
    for (old_offset, spec) in std::mem::take(&mut self.spec_blocks) {
        // If `old_offset` itself did not survive optimization, attach the spec
        // block to the next-largest surviving offset; if there is none, attach
        // it to the overall last offset.
        let new_offset = match original_to_new.range(old_offset..).next() {
            Some((_, mapped)) => *mapped,
            None => last_new_offset,
        };
        remapped.insert(new_offset, spec);
    }
    self.spec_blocks = remapped;
}

/// Emits a file-format bytecode.
fn emit(&mut self, bc: FF::Bytecode) {
self.code.push(bc)
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -24,7 +24,6 @@ use move_stackless_bytecode::{
function_target::{FunctionData, FunctionTarget},
function_target_pipeline::{FunctionTargetProcessor, FunctionTargetsHolder},
stackless_bytecode::Bytecode,
stackless_control_flow_graph::StacklessControlFlowGraph,
};
use std::collections::{BTreeMap, BTreeSet};

Expand Down Expand Up @@ -81,7 +80,6 @@ impl ReducedDefUseGraph {
eliminate_all_self_assigns: bool,
) -> BTreeSet<u16> {
let code = target.get_bytecode();
let cfg = StacklessControlFlowGraph::new_forward(code);
let live_vars = target
.get_annotations()
.get::<LiveVarAnnotation>()
Expand Down Expand Up @@ -113,7 +111,6 @@ impl ReducedDefUseGraph {
let eliminate_this_self_assign = Self::should_eliminate_given_self_assign(
self_assign,
code,
&cfg,
live_vars,
eliminate_all_self_assigns,
);
Expand Down Expand Up @@ -232,27 +229,29 @@ impl ReducedDefUseGraph {

/// Should `self_assign` be eliminated?
fn should_eliminate_given_self_assign(
self_assign: CodeOffset,
self_assign_offset: CodeOffset,
code: &[Bytecode],
cfg: &StacklessControlFlowGraph,
live_vars: &LiveVarAnnotation,
eliminate_all_self_assigns: bool,
) -> bool {
if !eliminate_all_self_assigns {
vineethk marked this conversation as resolved.
Show resolved Hide resolved
// Eliminate this self assign if the definition for this self-assign is in the same block
// before the self assign.
let block = cfg.enclosing_block(self_assign);
let block_begin_offset = cfg.code_range(block).start;
let self_assign_instr = &code[self_assign as usize];
// Eliminate this self assign if each of its uses are the last sources of their instructions.
let self_assign_instr = &code[self_assign_offset as usize];
let self_assign_temp = self_assign_instr.dests()[0];
// Is `self_assign_temp` live before this block?
let info = live_vars
.get_info_at(block_begin_offset as CodeOffset)
.before
let live_info_after = live_vars
.get_info_at(self_assign_offset)
.after
.get(&self_assign_temp);
match info {
None => true, // must be defined in the block
Some(live) => !live.usage_offsets().contains(&self_assign),
match live_info_after {
None => true,
Some(live) => live.usage_offsets().iter().all(|use_offset| {
let use_instr = &code[*use_offset as usize];
let sources = use_instr.sources();
sources
.iter()
.position(|source| *source == self_assign_temp)
.is_some_and(|pos| pos == sources.len() - 1)
}),
}
} else {
true
Expand Down
Loading
Loading