//! This implements the VCode container: "virtual-register code", a CFG of
//! lowered machine instructions (`MachInst`s) that still refer to virtual
//! registers. It is the form consumed by the register allocator and, once
//! registers have been allocated and prologue/epilogue code inserted, the
//! form emitted into a `MachBuffer`.

use crate::ir::{self, types, SourceLoc};
use crate::machinst::*;
use crate::settings;
use regalloc::Function as RegallocFunction;
use regalloc::Set as RegallocSet;
use regalloc::{
BlockIx, InstIx, Range, RegAllocResult, RegClass, RegUsageCollector, RegUsageMapper, SpillSlot,
StackmapRequestInfo,
};
use alloc::boxed::Box;
use alloc::{borrow::Cow, vec::Vec};
use std::fmt;
use std::iter;
use std::string::String;
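/// Index referring to an instruction in VCode.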
pub type InsnIndex = u32;
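/// Index referring to a basic block in VCode.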
pub type BlockIndex = u32;
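/// VCodeInst wraps the requirements for a MachInst to be in VCode: it must be
/// a lowered machine instruction and it must know how to emit itself.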
pub trait VCodeInst: MachInst + MachInstEmit {}
impl<I: MachInst + MachInstEmit> VCodeInst for I {}
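/// A function body in lowered, virtual-register ("VCode") form: a CFG of
/// machine instructions, together with block layout, successor, source
/// location, and safepoint information, plus the ABI implementation. This is
/// the representation handed to the register allocator and, afterwards, to
/// binary emission.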
pub struct VCode<I: VCodeInst> {
    /// Function liveins.
    liveins: RegallocSet<RealReg>,
    /// Function liveouts.
    liveouts: RegallocSet<RealReg>,
    /// IR-level type for each virtual register.
    vreg_types: Vec<Type>,
    /// Are any of the vregs reference-typed?
    have_ref_values: bool,
    /// Lowered machine instructions, in block order.
    insts: Vec<I>,
    /// Source location for each instruction (parallel to `insts`).
    srclocs: Vec<SourceLoc>,
    /// Entry block index.
    entry: BlockIndex,
    /// Instruction-index range (start, end) for each block.
    block_ranges: Vec<(InsnIndex, InsnIndex)>,
    /// For each block, the (start, end) range of its successors in `block_succs`.
    block_succ_range: Vec<(usize, usize)>,
    /// Successor lists for all blocks, concatenated into one vector.
    block_succs: Vec<BlockIx>,
    /// Block-lowering-order information.
    block_order: BlockLoweringOrder,
    /// ABI object for this function.
    abi: Box<dyn ABIBody<I = I>>,
    /// Safepoint instruction indices; filled in after register allocation.
    safepoint_insns: Vec<InsnIndex>,
    /// Reference-typed spill slots at each safepoint (post-regalloc), used to build stack maps at emission.
    safepoint_slots: Vec<Vec<SpillSlot>>,
}
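/// A builder for a VCode function body. Lowering fills the builder in one
/// basic block at a time, in the final (lowered) block order, and `build()`
/// hands back the finished `VCode` along with the stack-map request info for
/// the register allocator.
///
/// A minimal sketch of the intended call sequence (illustrative only: `abi`,
/// `block_order`, and the lowered instructions are assumed to be supplied by
/// the lowering driver and are not shown here):
///
/// ```ignore
/// let mut builder = VCodeBuilder::new(abi, block_order);
/// builder.set_entry(0);
/// for block_insts in lowered_blocks {
///     for (inst, is_safepoint) in block_insts {
///         builder.push(inst, is_safepoint);
///     }
///     builder.end_bb();
/// }
/// let (vcode, stackmap_request_info) = builder.build();
/// ```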
pub struct VCodeBuilder<I: VCodeInst> {
    /// In-progress VCode.
    vcode: VCode<I>,
    /// In-progress stack-map request info for the register allocator.
    stackmap_info: StackmapRequestInfo,
    /// Instruction index at which the current block started.
    block_start: InsnIndex,
    /// Start of the current block's successors within `vcode.block_succs`.
    succ_start: usize,
    /// Current source location, attached to each pushed instruction.
    cur_srcloc: SourceLoc,
impl<I: VCodeInst> VCodeBuilder<I> {
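    /// Create a new builder for the given ABI object and lowered block order.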
pub fn new(abi: Box<dyn ABIBody<I = I>>, block_order: BlockLoweringOrder) -> VCodeBuilder<I> {
let reftype_class = I::ref_type_regclass(abi.flags());
let vcode = VCode::new(abi, block_order);
let stackmap_info = StackmapRequestInfo {
reftype_class,
reftyped_vregs: vec![],
safepoint_insns: vec![],
};
VCodeBuilder {
vcode,
stackmap_info,
block_start: 0,
succ_start: 0,
cur_srcloc: SourceLoc::default(),
}
}
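    /// Access the ABI object mutably.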
pub fn abi(&mut self) -> &mut dyn ABIBody<I = I> {
&mut *self.vcode.abi
}
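    /// Access the block-lowering order.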
pub fn block_order(&self) -> &BlockLoweringOrder {
&self.vcode.block_order
}
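    /// Set the IR-level type of a virtual register, growing the type table as
    /// needed. Reference-typed vregs are also recorded for stack-map generation.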
pub fn set_vreg_type(&mut self, vreg: VirtualReg, ty: Type) {
if self.vcode.vreg_types.len() <= vreg.get_index() {
self.vcode
.vreg_types
.resize(vreg.get_index() + 1, ir::types::I8);
}
self.vcode.vreg_types[vreg.get_index()] = ty;
if is_reftype(ty) {
self.stackmap_info.reftyped_vregs.push(vreg);
self.vcode.have_ref_values = true;
}
}
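    /// Are there any reference-typed values among the vregs seen so far?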
pub fn have_ref_values(&self) -> bool {
self.vcode.have_ref_values()
}
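    /// Set the entry block.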
pub fn set_entry(&mut self, block: BlockIndex) {
self.vcode.entry = block;
}
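    /// End the current basic block. Must be called after pushing all of the
    /// block's instructions; records the block's instruction range and its
    /// slice of the successor list.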
pub fn end_bb(&mut self) {
let start_idx = self.block_start;
let end_idx = self.vcode.insts.len() as InsnIndex;
self.block_start = end_idx;
self.vcode.block_ranges.push((start_idx, end_idx));
let succ_end = self.vcode.block_succs.len();
self.vcode
.block_succ_range
.push((self.succ_start, succ_end));
self.succ_start = succ_end;
}
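    /// Push an instruction for the current block: records any branch targets as
    /// successors, attaches the current source location, and, if `is_safepoint`
    /// is true, registers the instruction for stack-map generation.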
pub fn push(&mut self, insn: I, is_safepoint: bool) {
match insn.is_term() {
MachTerminator::None | MachTerminator::Ret => {}
MachTerminator::Uncond(target) => {
self.vcode.block_succs.push(BlockIx::new(target.get()));
}
MachTerminator::Cond(true_branch, false_branch) => {
self.vcode.block_succs.push(BlockIx::new(true_branch.get()));
self.vcode
.block_succs
.push(BlockIx::new(false_branch.get()));
}
MachTerminator::Indirect(targets) => {
for target in targets {
self.vcode.block_succs.push(BlockIx::new(target.get()));
}
}
}
self.vcode.insts.push(insn);
self.vcode.srclocs.push(self.cur_srcloc);
if is_safepoint {
self.stackmap_info
.safepoint_insns
.push(InstIx::new((self.vcode.insts.len() - 1) as u32));
}
}
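    /// Get the current source location.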
pub fn get_srcloc(&self) -> SourceLoc {
self.cur_srcloc
}
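    /// Set the current source location, attached to subsequently pushed
    /// instructions.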
pub fn set_srcloc(&mut self, srcloc: SourceLoc) {
self.cur_srcloc = srcloc;
}
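    /// Consume the builder, returning the VCode and the stack-map request info
    /// to pass to the register allocator.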
pub fn build(self) -> (VCode<I>, StackmapRequestInfo) {
(self.vcode, self.stackmap_info)
}
}
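/// Is this a redundant move (source and destination are the same register)?
/// Such moves are left behind when regalloc assigns both sides to the same
/// physical register, and can simply be dropped.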
fn is_redundant_move<I: VCodeInst>(insn: &I) -> bool {
if let Some((to, from)) = insn.is_move() {
to.to_reg() == from
} else {
false
}
}
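/// Is this an IR reference type (`R32` or `R64`), and hence relevant for stack
/// maps?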
fn is_reftype(ty: Type) -> bool {
ty == types::R64 || ty == types::R32
}
impl<I: VCodeInst> VCode<I> {
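    /// Create a new, empty VCode for the given ABI object and block order.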
fn new(abi: Box<dyn ABIBody<I = I>>, block_order: BlockLoweringOrder) -> VCode<I> {
VCode {
liveins: abi.liveins(),
liveouts: abi.liveouts(),
vreg_types: vec![],
have_ref_values: false,
insts: vec![],
srclocs: vec![],
entry: 0,
block_ranges: vec![],
block_succ_range: vec![],
block_succs: vec![],
block_order,
abi,
safepoint_insns: vec![],
safepoint_slots: vec![],
}
}
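    /// Get the flags controlling this function's compilation.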
pub fn flags(&self) -> &settings::Flags {
self.abi.flags()
}
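    /// Get the IR-level type of a virtual register.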
pub fn vreg_type(&self, vreg: VirtualReg) -> Type {
self.vreg_types[vreg.get_index()]
}
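    /// Are there any reference-typed values among the vregs?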
pub fn have_ref_values(&self) -> bool {
self.have_ref_values
}
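    /// Get the entry block index.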
pub fn entry(&self) -> BlockIndex {
self.entry
}
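    /// Get the number of blocks; block indices are in `0..num_blocks()`.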
pub fn num_blocks(&self) -> usize {
self.block_ranges.len()
}
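    /// Get this function's frame size, as computed by the ABI object.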
pub fn frame_size(&self) -> u32 {
self.abi.frame_size()
}
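    /// Get the size of the stack-argument area, as computed by the ABI object.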
pub fn stack_args_size(&self) -> u32 {
self.abi.stack_args_size()
}
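    /// Get the successors of a block.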
pub fn succs(&self, block: BlockIndex) -> &[BlockIx] {
let (start, end) = self.block_succ_range[block as usize];
&self.block_succs[start..end]
}
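    /// Take the register-allocation result and rewrite the instruction list:
    /// copy in the allocated instructions, drop redundant moves, prepend the
    /// prologue to the entry block, replace returns with the epilogue, and
    /// remap source locations and safepoint indices onto the new instruction
    /// indices. Also records the spill-slot count and clobbered registers on
    /// the ABI object.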
pub fn replace_insns_from_regalloc(&mut self, result: RegAllocResult<Self>) {
self.abi.set_num_spillslots(result.num_spill_slots as usize);
self.abi
.set_clobbered(result.clobbered_registers.map(|r| Writable::from_reg(*r)));
let mut final_insns = vec![];
let mut final_block_ranges = vec![(0, 0); self.num_blocks()];
let mut final_srclocs = vec![];
let mut final_safepoint_insns = vec![];
let mut safept_idx = 0;
assert!(result.target_map.elems().len() == self.num_blocks());
for block in 0..self.num_blocks() {
let start = result.target_map.elems()[block].get() as usize;
let end = if block == self.num_blocks() - 1 {
result.insns.len()
} else {
result.target_map.elems()[block + 1].get() as usize
};
let block = block as BlockIndex;
let final_start = final_insns.len() as InsnIndex;
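            // The entry block gets the prologue prepended to its body.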
if block == self.entry {
let prologue = self.abi.gen_prologue();
let len = prologue.len();
final_insns.extend(prologue.into_iter());
final_srclocs.extend(iter::repeat(SourceLoc::default()).take(len));
}
for i in start..end {
let insn = &result.insns[i];
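                // Elide redundant moves (source == destination), a common
                // by-product of register allocation.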
if is_redundant_move(insn) {
continue;
}
let orig_iix = result.orig_insn_map[InstIx::new(i as u32)];
let srcloc = if orig_iix.is_invalid() {
SourceLoc::default()
} else {
self.srclocs[orig_iix.get() as usize]
};
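                // A return is replaced by the epilogue, which itself ends in a
                // return instruction.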
let is_ret = insn.is_term() == MachTerminator::Ret;
if is_ret {
let epilogue = self.abi.gen_epilogue();
let len = epilogue.len();
final_insns.extend(epilogue.into_iter());
final_srclocs.extend(iter::repeat(srcloc).take(len));
} else {
final_insns.push(insn.clone());
final_srclocs.push(srcloc);
}
if safept_idx < result.new_safepoint_insns.len()
&& (result.new_safepoint_insns[safept_idx].get() as usize) == i
{
let idx = final_insns.len() - 1;
final_safepoint_insns.push(idx as InsnIndex);
safept_idx += 1;
}
}
let final_end = final_insns.len() as InsnIndex;
final_block_ranges[block as usize] = (final_start, final_end);
}
debug_assert!(final_insns.len() == final_srclocs.len());
self.insts = final_insns;
self.srclocs = final_srclocs;
self.block_ranges = final_block_ranges;
self.safepoint_insns = final_safepoint_insns;
self.safepoint_slots = result.stackmaps;
}
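    /// Emit the instructions to a `MachBuffer`, with the block layout already
    /// final. Binds one label per block, pads blocks to the required alignment
    /// with nops, records source-location ranges, feeds stack maps to the
    /// emission state at safepoints, and emits an island between blocks when
    /// pending buffer contents might otherwise go out of range.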
pub fn emit(&self) -> MachBuffer<I>
where
I: MachInstEmit,
{
let mut buffer = MachBuffer::new();
let mut state = I::State::new(&*self.abi);
buffer.reserve_labels_for_blocks(self.num_blocks() as BlockIndex);
let flags = self.abi.flags();
let mut safepoint_idx = 0;
let mut cur_srcloc = None;
for block in 0..self.num_blocks() {
let block = block as BlockIndex;
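            // Pad with nops up to the required basic-block alignment.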
let new_offset = I::align_basic_block(buffer.cur_offset());
while new_offset > buffer.cur_offset() {
let nop = I::gen_nop((new_offset - buffer.cur_offset()) as usize);
nop.emit(&mut buffer, flags, &mut Default::default());
}
assert_eq!(buffer.cur_offset(), new_offset);
let (start, end) = self.block_ranges[block as usize];
buffer.bind_label(MachLabel::from_block(block));
for iix in start..end {
let srcloc = self.srclocs[iix as usize];
if cur_srcloc != Some(srcloc) {
if cur_srcloc.is_some() {
buffer.end_srcloc();
}
buffer.start_srcloc(srcloc);
cur_srcloc = Some(srcloc);
}
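                // If this instruction is a safepoint with live reference-typed
                // slots, build its stack map and hand it to the emission state
                // ahead of the instruction.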
if safepoint_idx < self.safepoint_insns.len()
&& self.safepoint_insns[safepoint_idx] == iix
{
                    if !self.safepoint_slots[safepoint_idx].is_empty() {
let stackmap = self.abi.spillslots_to_stackmap(
&self.safepoint_slots[safepoint_idx][..],
&state,
);
state.pre_safepoint(stackmap);
}
safepoint_idx += 1;
}
self.insts[iix as usize].emit(&mut buffer, flags, &mut state);
}
if cur_srcloc.is_some() {
buffer.end_srcloc();
cur_srcloc = None;
}
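            // Conservatively: if the worst-case size of the next block could
            // push any pending branch fixups out of range, emit an island
            // (veneers, etc.) between blocks now.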
if block < (self.num_blocks() - 1) as BlockIndex {
let next_block = block + 1;
let next_block_range = self.block_ranges[next_block as usize];
let next_block_size = next_block_range.1 - next_block_range.0;
let worst_case_next_bb = I::worst_case_size() * next_block_size;
if buffer.island_needed(worst_case_next_bb) {
buffer.emit_island();
}
}
}
buffer
}
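    /// Get the IR-level block corresponding to a lowered block index, if any.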
pub fn bindex_to_bb(&self, block: BlockIndex) -> Option<ir::Block> {
self.block_order.lowered_order()[block as usize].orig_block()
}
}
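// The regalloc.rs view of this function: instruction and block accessors, plus
// hooks for generating spills, reloads, and moves, delegating to the ABI object
// and to the MachInst implementation.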
impl<I: VCodeInst> RegallocFunction for VCode<I> {
type Inst = I;
fn insns(&self) -> &[I] {
&self.insts[..]
}
fn insns_mut(&mut self) -> &mut [I] {
&mut self.insts[..]
}
fn get_insn(&self, insn: InstIx) -> &I {
&self.insts[insn.get() as usize]
}
fn get_insn_mut(&mut self, insn: InstIx) -> &mut I {
&mut self.insts[insn.get() as usize]
}
fn blocks(&self) -> Range<BlockIx> {
Range::new(BlockIx::new(0), self.block_ranges.len())
}
fn entry_block(&self) -> BlockIx {
BlockIx::new(self.entry)
}
fn block_insns(&self, block: BlockIx) -> Range<InstIx> {
let (start, end) = self.block_ranges[block.get() as usize];
Range::new(InstIx::new(start), (end - start) as usize)
}
fn block_succs(&self, block: BlockIx) -> Cow<[BlockIx]> {
let (start, end) = self.block_succ_range[block.get() as usize];
Cow::Borrowed(&self.block_succs[start..end])
}
fn is_ret(&self, insn: InstIx) -> bool {
match self.insts[insn.get() as usize].is_term() {
MachTerminator::Ret => true,
_ => false,
}
}
fn get_regs(insn: &I, collector: &mut RegUsageCollector) {
insn.get_regs(collector)
}
fn map_regs<RUM: RegUsageMapper>(insn: &mut I, mapper: &RUM) {
insn.map_regs(mapper);
}
fn is_move(&self, insn: &I) -> Option<(Writable<Reg>, Reg)> {
insn.is_move()
}
fn get_num_vregs(&self) -> usize {
self.vreg_types.len()
}
fn get_spillslot_size(&self, regclass: RegClass, vreg: VirtualReg) -> u32 {
let ty = self.vreg_type(vreg);
self.abi.get_spillslot_size(regclass, ty)
}
fn gen_spill(&self, to_slot: SpillSlot, from_reg: RealReg, vreg: Option<VirtualReg>) -> I {
let ty = vreg.map(|v| self.vreg_type(v));
self.abi.gen_spill(to_slot, from_reg, ty)
}
fn gen_reload(
&self,
to_reg: Writable<RealReg>,
from_slot: SpillSlot,
vreg: Option<VirtualReg>,
) -> I {
let ty = vreg.map(|v| self.vreg_type(v));
self.abi.gen_reload(to_reg, from_slot, ty)
}
fn gen_move(&self, to_reg: Writable<RealReg>, from_reg: RealReg, vreg: VirtualReg) -> I {
let ty = self.vreg_type(vreg);
I::gen_move(to_reg.map(|r| r.to_reg()), from_reg.to_reg(), ty)
}
fn gen_zero_len_nop(&self) -> I {
I::gen_zero_len_nop()
}
fn maybe_direct_reload(&self, insn: &I, reg: VirtualReg, slot: SpillSlot) -> Option<I> {
insn.maybe_direct_reload(reg, slot)
}
fn func_liveins(&self) -> RegallocSet<RealReg> {
self.liveins.clone()
}
fn func_liveouts(&self) -> RegallocSet<RealReg> {
self.liveouts.clone()
}
}
impl<I: VCodeInst> fmt::Debug for VCode<I> {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
writeln!(f, "VCode_Debug {{")?;
writeln!(f, " Entry block: {}", self.entry)?;
for block in 0..self.num_blocks() {
writeln!(f, "Block {}:", block,)?;
for succ in self.succs(block as BlockIndex) {
writeln!(f, " (successor: Block {})", succ.get())?;
}
let (start, end) = self.block_ranges[block];
writeln!(f, " (instruction range: {} .. {})", start, end)?;
for inst in start..end {
writeln!(f, " Inst {}: {:?}", inst, self.insts[inst as usize])?;
}
}
writeln!(f, "}}")?;
Ok(())
}
}
impl<I: VCodeInst> ShowWithRRU for VCode<I> {
fn show_rru(&self, mb_rru: Option<&RealRegUniverse>) -> String {
use std::fmt::Write;
let mut s = String::new();
write!(&mut s, "VCode_ShowWithRRU {{{{\n").unwrap();
write!(&mut s, " Entry block: {}\n", self.entry).unwrap();
let mut state = Default::default();
let mut safepoint_idx = 0;
for i in 0..self.num_blocks() {
let block = i as BlockIndex;
write!(&mut s, "Block {}:\n", block).unwrap();
if let Some(bb) = self.bindex_to_bb(block) {
write!(&mut s, " (original IR block: {})\n", bb).unwrap();
}
for succ in self.succs(block) {
write!(&mut s, " (successor: Block {})\n", succ.get()).unwrap();
}
let (start, end) = self.block_ranges[block as usize];
write!(&mut s, " (instruction range: {} .. {})\n", start, end).unwrap();
for inst in start..end {
if safepoint_idx < self.safepoint_insns.len()
&& self.safepoint_insns[safepoint_idx] == inst
{
write!(
&mut s,
" (safepoint: slots {:?} with EmitState {:?})\n",
self.safepoint_slots[safepoint_idx], state,
)
.unwrap();
safepoint_idx += 1;
}
write!(
&mut s,
" Inst {}: {}\n",
inst,
self.insts[inst as usize].pretty_print(mb_rru, &mut state)
)
.unwrap();
}
}
write!(&mut s, "}}}}\n").unwrap();
s
}
}