diff --git a/Cargo.toml b/Cargo.toml
index 0793ca3b13..bbbc1b8749 100644
--- a/Cargo.toml
+++ b/Cargo.toml
@@ -110,6 +110,8 @@ immix_non_moving = []
 # if `immix_non_moving` is in use.
 sticky_immix_non_moving_nursery = []
 
+# Statistics about how many objects get moved in defrag GCs for Immix
+objects_moved_stats = []
 # Reduce block size for ImmixSpace. This mitigates fragmentation when defrag is disabled.
 immix_smaller_block = []
 
diff --git a/src/policy/immix/immixspace.rs b/src/policy/immix/immixspace.rs
index 3809f7bd24..40da8ef508 100644
--- a/src/policy/immix/immixspace.rs
+++ b/src/policy/immix/immixspace.rs
@@ -3,6 +3,8 @@ use super::line::*;
 use super::{block::*, defrag::Defrag};
 use crate::plan::VectorObjectQueue;
 use crate::policy::gc_work::{TraceKind, TRACE_KIND_TRANSITIVE_PIN};
+#[cfg(feature = "objects_moved_stats")]
+use crate::policy::immix::IMMIXSPACE_OBJECTS_MARKED;
 use crate::policy::sft::GCWorkerMutRef;
 use crate::policy::sft::SFT;
 use crate::policy::sft_map::SFTMap;
@@ -17,6 +19,7 @@ use crate::util::linear_scan::{Region, RegionIterator};
 use crate::util::metadata::side_metadata::SideMetadataSpec;
 #[cfg(feature = "vo_bit")]
 use crate::util::metadata::vo_bit;
+
 use crate::util::metadata::{self, MetadataSpec};
 use crate::util::object_forwarding;
 use crate::util::{Address, ObjectReference};
@@ -711,6 +714,9 @@ impl<VM: VMBinding> ImmixSpace<VM> {
                 break;
             }
         }
+
+        #[cfg(feature = "objects_moved_stats")]
+        IMMIXSPACE_OBJECTS_MARKED.fetch_add(1, Ordering::SeqCst);
         true
     }
 
diff --git a/src/policy/immix/mod.rs b/src/policy/immix/mod.rs
index 32c94261e9..363f195eb7 100644
--- a/src/policy/immix/mod.rs
+++ b/src/policy/immix/mod.rs
@@ -7,6 +7,8 @@ pub use immixspace::*;
 
 use crate::policy::immix::block::Block;
 use crate::util::linear_scan::Region;
+#[cfg(feature = "objects_moved_stats")]
+use std::sync::atomic::AtomicUsize;
 
 /// The max object size for immix: half of a block
 pub const MAX_IMMIX_OBJECT_SIZE: usize = Block::BYTES >> 1;
@@ -38,6 +40,10 @@ pub const NEVER_MOVE_OBJECTS: bool = !DEFRAG && !PREFER_COPY_ON_NURSERY_GC;
 /// Otherwise, do it at mark time.
 pub const MARK_LINE_AT_SCAN_TIME: bool = true;
 
+/// Number of objects marked in the immix space during the current GC.
+#[cfg(feature = "objects_moved_stats")]
+pub static IMMIXSPACE_OBJECTS_MARKED: AtomicUsize = AtomicUsize::new(0);
+
 macro_rules! validate {
     ($x: expr) => { assert!($x, stringify!($x)) };
     ($x: expr => $y: expr) => { if $x { assert!($y, stringify!($x implies $y)) } };
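Note for reviewers: the counters are declared as plain `static` atomics rather than `static mut`, since an `AtomicUsize` is already safe to update from concurrent GC workers and needs no `unsafe` at the increment sites. A minimal, self-contained sketch of the pattern (the static name below is an illustrative stand-in, not an mmtk-core export):

```rust
use std::sync::atomic::{AtomicUsize, Ordering};
use std::thread;

// Stand-in for a feature-gated counter like IMMIXSPACE_OBJECTS_MARKED.
static OBJECTS_MARKED: AtomicUsize = AtomicUsize::new(0);

fn main() {
    // Simulate four GC workers, each marking 1000 objects concurrently.
    let workers: Vec<_> = (0..4)
        .map(|_| {
            thread::spawn(|| {
                for _ in 0..1000 {
                    // Safe on a plain `static` atomic; no `unsafe` required.
                    OBJECTS_MARKED.fetch_add(1, Ordering::SeqCst);
                }
            })
        })
        .collect();
    workers.into_iter().for_each(|w| w.join().unwrap());
    assert_eq!(OBJECTS_MARKED.load(Ordering::SeqCst), 4000);
}
```

`SeqCst` matches the orderings used elsewhere in the patch; for pure statistics, `Relaxed` would likely also be acceptable, at the cost of relying on the scheduler's own synchronization for visibility of the final values.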
validate { ($x: expr) => { assert!($x, stringify!($x)) }; ($x: expr => $y: expr) => { if $x { assert!($y, stringify!($x implies $y)) } }; diff --git a/src/policy/markcompactspace.rs b/src/policy/markcompactspace.rs index 693218b492..d278be41ac 100644 --- a/src/policy/markcompactspace.rs +++ b/src/policy/markcompactspace.rs @@ -5,6 +5,8 @@ use super::space::{CommonSpace, Space}; use crate::plan::VectorObjectQueue; use crate::policy::gc_work::{TraceKind, TRACE_KIND_TRANSITIVE_PIN}; use crate::policy::sft::GCWorkerMutRef; +#[cfg(feature = "objects_moved_stats")] +use crate::policy::OBJECTS_COPIED; use crate::scheduler::GCWorker; use crate::util::alloc::allocator::align_allocation_no_fill; use crate::util::constants::LOG_BYTES_IN_WORD; @@ -398,6 +400,11 @@ impl MarkCompactSpace { trace!(" copy from {} to {}", obj, new_object); let end_of_new_object = VM::VMObjectModel::copy_to(obj, new_object, Address::ZERO); + + #[cfg(feature = "objects_moved_stats")] + unsafe { + OBJECTS_COPIED.fetch_add(1, Ordering::SeqCst); + } // update VO bit, vo_bit::set_vo_bit::(new_object); to = new_object.to_object_start::() + copied_size; diff --git a/src/policy/mod.rs b/src/policy/mod.rs index c5763a213f..52998c608f 100644 --- a/src/policy/mod.rs +++ b/src/policy/mod.rs @@ -30,3 +30,11 @@ pub mod markcompactspace; pub mod marksweepspace; #[cfg(feature = "vm_space")] pub mod vmspace; + +/// Keep track of stats about # of objects scanned and copied +#[cfg(feature = "objects_moved_stats")] +use std::sync::atomic::AtomicUsize; +#[cfg(feature = "objects_moved_stats")] +pub static mut OBJECTS_COPIED: AtomicUsize = AtomicUsize::new(0); +#[cfg(feature = "objects_moved_stats")] +pub static mut OBJECTS_SCANNED: AtomicUsize = AtomicUsize::new(0); diff --git a/src/scheduler/gc_work.rs b/src/scheduler/gc_work.rs index 88bba5c54c..b4af773e88 100644 --- a/src/scheduler/gc_work.rs +++ b/src/scheduler/gc_work.rs @@ -3,10 +3,16 @@ use super::*; use crate::global_state::GcStatus; use crate::plan::ObjectsClosure; use crate::plan::VectorObjectQueue; +#[cfg(feature = "objects_moved_stats")] +use crate::policy::immix::IMMIXSPACE_OBJECTS_MARKED; +#[cfg(feature = "objects_moved_stats")] +use crate::policy::{OBJECTS_COPIED, OBJECTS_SCANNED}; use crate::util::*; use crate::vm::edge_shape::Edge; use crate::vm::*; use crate::*; +#[cfg(feature = "objects_moved_stats")] +use atomic::Ordering; use std::marker::PhantomData; use std::ops::{Deref, DerefMut}; @@ -226,6 +232,17 @@ impl GCWork for EndOfGC { self.elapsed.as_millis() ); + #[cfg(feature = "objects_moved_stats")] + unsafe { + info!( + "# of objects scanned: {:?}; # of immixspace objects marked: {:?}, # of objects copied: {:?}", + OBJECTS_SCANNED, IMMIXSPACE_OBJECTS_MARKED, OBJECTS_COPIED + ); + OBJECTS_SCANNED.store(0, Ordering::SeqCst); + OBJECTS_COPIED.store(0, Ordering::SeqCst); + IMMIXSPACE_OBJECTS_MARKED.store(0, Ordering::SeqCst); + } + #[cfg(feature = "count_live_bytes_in_gc")] { let live_bytes = mmtk.state.get_live_bytes_in_last_gc(); @@ -836,6 +853,10 @@ pub trait ScanObjectsWork: GCWork + Sized { trace!("Scan object (edge) {}", object); // If an object supports edge-enqueuing, we enqueue its edges. 
diff --git a/src/util/object_forwarding.rs b/src/util/object_forwarding.rs
index 01b3fec447..16f2f693a5 100644
--- a/src/util/object_forwarding.rs
+++ b/src/util/object_forwarding.rs
@@ -18,6 +18,9 @@ const FORWARDING_POINTER_MASK: usize = 0x00ff_ffff_ffff_fff8;
 #[cfg(target_pointer_width = "32")]
 const FORWARDING_POINTER_MASK: usize = 0xffff_fffc;
 
+#[cfg(feature = "objects_moved_stats")]
+use crate::policy::OBJECTS_COPIED;
+
 /// Attempt to become the worker thread who will forward the object.
 /// The successful worker will set the object forwarding bits to BEING_FORWARDED, preventing other workers from forwarding the same object.
 pub fn attempt_to_forward<VM: VMBinding>(object: ObjectReference) -> u8 {
@@ -80,6 +83,8 @@ pub fn forward_object<VM: VMBinding>(
     copy_context: &mut GCWorkerCopyContext<VM>,
 ) -> ObjectReference {
     let new_object = VM::VMObjectModel::copy(object, semantics, copy_context);
+    #[cfg(feature = "objects_moved_stats")]
+    OBJECTS_COPIED.fetch_add(1, Ordering::SeqCst);
     if let Some(shift) = forwarding_bits_offset_in_forwarding_pointer::<VM>() {
         VM::VMObjectModel::LOCAL_FORWARDING_POINTER_SPEC.store_atomic::<VM, usize>(
             object,
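To try this out, enable the feature when building, e.g. `cargo build --features objects_moved_stats`, or from a binding's `Cargo.toml`. A hypothetical dependency entry, assuming the binding consumes mmtk-core as a git dependency:

```toml
# Hypothetical downstream Cargo.toml entry; adjust the source to match
# how your binding already pulls in mmtk-core.
[dependencies]
mmtk = { git = "https://github.com/mmtk/mmtk-core.git", features = ["objects_moved_stats"] }
```

With the feature on, each GC ends with an `info!` line of the form `# of objects scanned: ...; # of immixspace objects marked: ...; # of objects copied: ...`, and all three counters are reset to zero for the next collection.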