Skip to content

Commit

Permalink
Merge branch 'master' into gabor/controller-footprint
Browse files Browse the repository at this point in the history
  • Loading branch information
mergify[bot] authored Jul 6, 2023
2 parents bb3ebab + 5334f9b commit 716fedb
Show file tree
Hide file tree
Showing 16 changed files with 82 additions and 83 deletions.
6 changes: 3 additions & 3 deletions nix/sources.json
Original file line number Diff line number Diff line change
Expand Up @@ -64,10 +64,10 @@
"homepage": null,
"owner": "dfinity",
"repo": "motoko-base",
"rev": "4df61399a02e409ebd630f9a6d9ac7de49acbea3",
"sha256": "1yd6s5aa2wp113kndnygfkzfga12kfrixi9mxvwllzfhgi9kcpfl",
"rev": "f842b4a7ea8c968377c6fe7a7df7bd1519c1ca56",
"sha256": "1r65il7j3ppadpkznms32xvyl6qlp108d4c7p14nq8g3xrh7f2pv",
"type": "tarball",
"url": "https://github.com/dfinity/motoko-base/archive/4df61399a02e409ebd630f9a6d9ac7de49acbea3.tar.gz",
"url": "https://github.com/dfinity/motoko-base/archive/f842b4a7ea8c968377c6fe7a7df7bd1519c1ca56.tar.gz",
"url_template": "https://github.com/<owner>/<repo>/archive/<rev>.tar.gz"
},
"motoko-matchers": {
Expand Down
7 changes: 0 additions & 7 deletions rts/motoko-rts-tests/Cargo.lock

Some generated files are not rendered by default. Learn more about how customized files appear on GitHub.

4 changes: 2 additions & 2 deletions rts/motoko-rts-tests/src/gc.rs
Original file line number Diff line number Diff line change
Expand Up @@ -444,7 +444,7 @@ fn check_continuation_table(mut offset: usize, continuation_table: &[ObjectIdx],
impl GC {
#[non_incremental_gc]
fn run(&self, heap: &mut MotokoHeap, _round: usize) -> bool {
- let heap_base = heap.heap_base_address() as u32;
+ let heap_base = heap.heap_base_address();
let static_roots = Value::from_ptr(heap.static_root_array_address());
let continuation_table_ptr_address = heap.continuation_table_ptr_address() as *mut Value;

Expand Down Expand Up @@ -505,7 +505,7 @@ impl GC {
};
unsafe {
REMEMBERED_SET = Some(RememberedSet::new(heap));
- LAST_HP = heap_1.last_ptr_address() as u32;
+ LAST_HP = heap_1.last_ptr_address();

let limits = motoko_rts::gc::generational::Limits {
base: heap_base as usize,
Expand Down
4 changes: 2 additions & 2 deletions rts/motoko-rts/src/barriers.rs
Original file line number Diff line number Diff line change
Expand Up @@ -9,7 +9,7 @@ pub unsafe fn init_with_barrier<M: Memory>(_mem: &mut M, location: *mut Value, v
#[non_incremental_gc]
pub unsafe fn init_with_barrier<M: Memory>(mem: &mut M, location: *mut Value, value: Value) {
*location = value;
- crate::gc::generational::write_barrier::post_write_barrier(mem, location as u32);
+ crate::gc::generational::write_barrier::post_write_barrier(mem, location as usize);
}

#[incremental_gc]
Expand All @@ -20,7 +20,7 @@ pub unsafe fn write_with_barrier<M: Memory>(mem: &mut M, location: *mut Value, v
#[non_incremental_gc]
pub unsafe fn write_with_barrier<M: Memory>(mem: &mut M, location: *mut Value, value: Value) {
*location = value;
- crate::gc::generational::write_barrier::post_write_barrier(mem, location as u32);
+ crate::gc::generational::write_barrier::post_write_barrier(mem, location as usize);
}

#[incremental_gc]
Expand Down
8 changes: 4 additions & 4 deletions rts/motoko-rts/src/gc.rs
Original file line number Diff line number Diff line change
Expand Up @@ -12,15 +12,15 @@ use motoko_rts_macros::*;
#[cfg(feature = "ic")]
#[non_incremental_gc]
unsafe fn should_do_gc(max_live: crate::types::Bytes<u64>) -> bool {
- use crate::memory::ic::linear_memory::{getHP, LAST_HP};
+ use crate::memory::ic::linear_memory::{get_hp_unskewed, LAST_HP};

// A factor of last heap size. We allow at most this much allocation before doing GC.
const HEAP_GROWTH_FACTOR: f64 = 1.5;

let heap_limit = core::cmp::min(
- (f64::from(LAST_HP) * HEAP_GROWTH_FACTOR) as u64,
- (u64::from(LAST_HP) + max_live.0) / 2,
+ (f64::from(LAST_HP as u32) * HEAP_GROWTH_FACTOR) as u64,
+ (u64::from(LAST_HP as u32) + max_live.0) / 2,
);

- u64::from(getHP()) >= heap_limit
+ u64::from(get_hp_unskewed() as u32) >= heap_limit
}
12 changes: 6 additions & 6 deletions rts/motoko-rts/src/gc/copying.rs
Original file line number Diff line number Diff line change
Expand Up @@ -31,9 +31,9 @@ unsafe fn copying_gc<M: Memory>(mem: &mut M) {
mem,
ic::get_aligned_heap_base(),
// get_hp
- || linear_memory::getHP() as usize,
+ || linear_memory::get_hp_unskewed(),
// set_hp
- |hp| linear_memory::setHP(hp),
+ |hp| linear_memory::set_hp_unskewed(hp),
ic::get_static_roots(),
crate::continuation_table::continuation_table_loc(),
// note_live_size
Expand All @@ -42,18 +42,18 @@ unsafe fn copying_gc<M: Memory>(mem: &mut M) {
|reclaimed| linear_memory::RECLAIMED += Bytes(u64::from(reclaimed.as_u32())),
);

- linear_memory::LAST_HP = linear_memory::getHP();
+ linear_memory::LAST_HP = linear_memory::get_hp_unskewed();
}

pub unsafe fn copying_gc_internal<
M: Memory,
GetHp: Fn() -> usize,
- SetHp: FnMut(u32),
+ SetHp: FnMut(usize),
NoteLiveSize: Fn(Bytes<u32>),
NoteReclaimed: Fn(Bytes<u32>),
>(
mem: &mut M,
- heap_base: u32,
+ heap_base: usize,
get_hp: GetHp,
mut set_hp: SetHp,
static_roots: Value,
Expand Down Expand Up @@ -105,7 +105,7 @@ pub unsafe fn copying_gc_internal<

// Reset the heap pointer
let new_hp = begin_from_space + (end_to_space - begin_to_space);
- set_hp(new_hp as u32);
+ set_hp(new_hp);
}

/// Evacuate (copy) an object in from-space to to-space.
Expand Down
10 changes: 5 additions & 5 deletions rts/motoko-rts/src/gc/generational.rs
Original file line number Diff line number Diff line change
Expand Up @@ -83,17 +83,17 @@ unsafe fn get_limits() -> Limits {
use crate::memory::ic::{self, linear_memory};
assert!(linear_memory::LAST_HP >= ic::get_aligned_heap_base());
Limits {
- base: ic::get_aligned_heap_base() as usize,
- last_free: linear_memory::LAST_HP as usize,
- free: linear_memory::getHP() as usize,
+ base: ic::get_aligned_heap_base(),
+ last_free: linear_memory::LAST_HP,
+ free: (linear_memory::get_hp_unskewed()),
}
}

#[cfg(feature = "ic")]
unsafe fn set_limits(limits: &Limits) {
use crate::memory::ic::linear_memory;
- linear_memory::setHP(limits.free as u32);
- linear_memory::LAST_HP = limits.free as u32;
+ linear_memory::set_hp_unskewed(limits.free);
+ linear_memory::LAST_HP = limits.free;
}

#[cfg(feature = "ic")]
Expand Down
10 changes: 5 additions & 5 deletions rts/motoko-rts/src/gc/generational/write_barrier.rs
Original file line number Diff line number Diff line change
Expand Up @@ -6,8 +6,8 @@ use crate::types::Value;
use motoko_rts_macros::ic_mem_fn;

pub static mut REMEMBERED_SET: Option<RememberedSet> = None;
- pub static mut HEAP_BASE: u32 = 0;
- pub static mut LAST_HP: u32 = 0;
+ pub static mut HEAP_BASE: usize = 0;
+ pub static mut LAST_HP: usize = 0;

#[cfg(feature = "ic")]
/// (Re-)initialize the write barrier for generational GC.
Expand All @@ -24,22 +24,22 @@ pub(crate) unsafe fn init_generational_write_barrier<M: Memory>(mem: &mut M) {
/// As the barrier is called after the write, `*location` refers to the NEW value.
/// No effect if the write barrier is deactivated.
#[ic_mem_fn]
- pub unsafe fn post_write_barrier<M: Memory>(mem: &mut M, location: u32) {
+ pub unsafe fn post_write_barrier<M: Memory>(mem: &mut M, location: usize) {
// Must be an unskewed address.
debug_assert_eq!(location & 0b1, 0);
// Checks have been optimized according to the frequency of occurrence.
// Only record locations inside old generation. Static roots are anyway marked by GC.
if location < LAST_HP {
// Nested ifs are more efficient when counting instructions on IC (explicit return counts as an instruction).
let value = *(location as *mut Value);
- if value.points_to_or_beyond(LAST_HP as usize) {
+ if value.points_to_or_beyond(LAST_HP) {
#[allow(clippy::collapsible_if)]
if location >= HEAP_BASE {
// Trap pointers that lead from old generation (or static roots) to young generation.
REMEMBERED_SET
.as_mut()
.unwrap()
- .insert(mem, Value::from_raw(location));
+ .insert(mem, Value::from_raw(location as u32));
}
}
}
Expand Down
4 changes: 2 additions & 2 deletions rts/motoko-rts/src/gc/incremental.rs
Original file line number Diff line number Diff line change
Expand Up @@ -38,7 +38,7 @@ pub mod time;
#[ic_mem_fn(ic_only)]
unsafe fn initialize_incremental_gc<M: Memory>(mem: &mut M) {
use crate::memory::ic;
- IncrementalGC::<M>::initialize(mem, ic::get_aligned_heap_base() as usize);
+ IncrementalGC::<M>::initialize(mem, ic::get_aligned_heap_base());
}

#[ic_mem_fn(ic_only)]
Expand Down Expand Up @@ -101,7 +101,7 @@ unsafe fn record_gc_stop<M: Memory>() {
use crate::memory::ic::{self, partitioned_memory};

let heap_size = partitioned_memory::get_heap_size();
- let static_size = Bytes(ic::get_aligned_heap_base());
+ let static_size = Bytes(ic::get_aligned_heap_base() as u32);
debug_assert!(heap_size >= static_size);
let dynamic_size = heap_size - static_size;
ic::MAX_LIVE = ::core::cmp::max(ic::MAX_LIVE, dynamic_size);
Expand Down
48 changes: 24 additions & 24 deletions rts/motoko-rts/src/gc/mark_compact.rs
Original file line number Diff line number Diff line change
Expand Up @@ -46,9 +46,9 @@ unsafe fn compacting_gc<M: Memory>(mem: &mut M) {
mem,
ic::get_aligned_heap_base(),
// get_hp
- || linear_memory::getHP() as usize,
+ || linear_memory::get_hp_unskewed(),
// set_hp
- |hp| linear_memory::setHP(hp),
+ |hp| linear_memory::set_hp_unskewed(hp),
ic::get_static_roots(),
crate::continuation_table::continuation_table_loc(),
// note_live_size
Expand All @@ -57,18 +57,18 @@ unsafe fn compacting_gc<M: Memory>(mem: &mut M) {
|reclaimed| linear_memory::RECLAIMED += Bytes(u64::from(reclaimed.as_u32())),
);

- linear_memory::LAST_HP = linear_memory::getHP();
+ linear_memory::LAST_HP = linear_memory::get_hp_unskewed();
}

pub unsafe fn compacting_gc_internal<
M: Memory,
GetHp: Fn() -> usize,
- SetHp: Fn(u32),
+ SetHp: Fn(usize),
NoteLiveSize: Fn(Bytes<u32>),
NoteReclaimed: Fn(Bytes<u32>),
>(
mem: &mut M,
- heap_base: u32,
+ heap_base: usize,
get_hp: GetHp,
set_hp: SetHp,
static_roots: Value,
Expand All @@ -92,21 +92,21 @@ pub unsafe fn compacting_gc_internal<
let reclaimed = old_hp - (get_hp() as u32);
note_reclaimed(Bytes(reclaimed));

- let live = get_hp() as u32 - heap_base;
- note_live_size(Bytes(live));
+ let live = get_hp() - heap_base;
+ note_live_size(Bytes(live as u32));
}

- unsafe fn mark_compact<M: Memory, SetHp: Fn(u32)>(
+ unsafe fn mark_compact<M: Memory, SetHp: Fn(usize)>(
mem: &mut M,
set_hp: SetHp,
- heap_base: u32,
+ heap_base: usize,
heap_end: u32,
static_roots: Value,
continuation_table_ptr_loc: *mut Value,
) {
- let mem_size = Bytes(heap_end - heap_base);
+ let mem_size = Bytes(heap_end - heap_base as u32);

- alloc_bitmap(mem, mem_size, heap_base / WORD_SIZE);
+ alloc_bitmap(mem, mem_size, heap_base as u32 / WORD_SIZE);
alloc_mark_stack(mem);

mark_static_roots(mem, static_roots, heap_base);
Expand All @@ -126,15 +126,15 @@ unsafe fn mark_compact<M: Memory, SetHp: Fn(u32)>(
free_bitmap();
}

- unsafe fn mark_static_roots<M: Memory>(mem: &mut M, static_roots: Value, heap_base: u32) {
+ unsafe fn mark_static_roots<M: Memory>(mem: &mut M, static_roots: Value, heap_base: usize) {
let root_array = static_roots.as_array();

// Static objects are not in the dynamic heap so don't need marking.
for i in 0..root_array.len() {
let obj = root_array.get(i).as_obj();
// Root array should only have pointers to other static MutBoxes
debug_assert_eq!(obj.tag(), TAG_MUTBOX); // check tag
- debug_assert!((obj as u32) < heap_base); // check that MutBox is static
+ debug_assert!((obj as usize) < heap_base); // check that MutBox is static
mark_root_mutbox_fields(mem, obj as *mut MutBox, heap_base);
}
}
Expand All @@ -157,18 +157,18 @@ unsafe fn mark_object<M: Memory>(mem: &mut M, obj: Value) {
push_mark_stack(mem, obj as usize, obj_tag);
}

- unsafe fn mark_stack<M: Memory>(mem: &mut M, heap_base: u32) {
+ unsafe fn mark_stack<M: Memory>(mem: &mut M, heap_base: usize) {
while let Some((obj, tag)) = pop_mark_stack() {
mark_fields(mem, obj as *mut Obj, tag, heap_base)
}
}

- unsafe fn mark_fields<M: Memory>(mem: &mut M, obj: *mut Obj, obj_tag: Tag, heap_base: u32) {
+ unsafe fn mark_fields<M: Memory>(mem: &mut M, obj: *mut Obj, obj_tag: Tag, heap_base: usize) {
visit_pointer_fields(
mem,
obj,
obj_tag,
- heap_base as usize,
+ heap_base,
|mem, field_addr| {
let field_value = *field_addr;
mark_object(mem, field_value);
Expand All @@ -194,9 +194,9 @@ unsafe fn mark_fields<M: Memory>(mem: &mut M, obj: *mut Obj, obj_tag: Tag, heap_
}

/// Specialized version of `mark_fields` for root `MutBox`es.
- unsafe fn mark_root_mutbox_fields<M: Memory>(mem: &mut M, mutbox: *mut MutBox, heap_base: u32) {
+ unsafe fn mark_root_mutbox_fields<M: Memory>(mem: &mut M, mutbox: *mut MutBox, heap_base: usize) {
let field_addr = &mut (*mutbox).field;
- if pointer_to_dynamic_heap(field_addr, heap_base as usize) {
+ if pointer_to_dynamic_heap(field_addr, heap_base) {
mark_object(mem, *field_addr);
// It's OK to thread forward pointers here as the static objects won't be moved, so we will
// be able to unthread objects pointed by these fields later.
Expand All @@ -213,7 +213,7 @@ unsafe fn mark_root_mutbox_fields<M: Memory>(mem: &mut M, mutbox: *mut MutBox, h
///
/// - Thread forward pointers of the object
///
- unsafe fn update_refs<SetHp: Fn(u32)>(set_hp: SetHp, heap_base: u32) {
+ unsafe fn update_refs<SetHp: Fn(usize)>(set_hp: SetHp, heap_base: usize) {
let mut free = heap_base;

let mut bitmap_iter = iter_bits();
Expand All @@ -236,7 +236,7 @@ unsafe fn update_refs<SetHp: Fn(u32)>(set_hp: SetHp, heap_base: u32) {
debug_assert!(new_obj.tag() >= TAG_OBJECT && new_obj.tag() <= TAG_NULL);
}

- free += p_size_words.to_bytes().as_u32();
+ free += p_size_words.to_bytes().as_usize();

// Thread forward pointers of the object
thread_fwd_pointers(p_new as *mut Obj, heap_base);
Expand All @@ -248,12 +248,12 @@ unsafe fn update_refs<SetHp: Fn(u32)>(set_hp: SetHp, heap_base: u32) {
}

/// Thread forward pointers in object
- unsafe fn thread_fwd_pointers(obj: *mut Obj, heap_base: u32) {
+ unsafe fn thread_fwd_pointers(obj: *mut Obj, heap_base: usize) {
visit_pointer_fields(
&mut (),
obj,
obj.tag(),
- heap_base as usize,
+ heap_base,
|_, field_addr| {
if (*field_addr).get_ptr() > obj as usize {
thread(field_addr)
Expand All @@ -273,14 +273,14 @@ unsafe fn thread(field: *mut Value) {
}

/// Unthread all references at given header, replacing with `new_loc`. Restores object header.
- unsafe fn unthread(obj: *mut Obj, new_loc: u32) {
+ unsafe fn unthread(obj: *mut Obj, new_loc: usize) {
let mut header = obj.tag();

// All objects and fields are word-aligned, and tags have the lowest bit set, so use the lowest
// bit to distinguish a header (tag) from a field address.
while header & 0b1 == 0 {
let tmp = (header as *const Obj).tag();
- (*(header as *mut Value)) = Value::from_ptr(new_loc as usize);
+ (*(header as *mut Value)) = Value::from_ptr(new_loc);
header = tmp;
}

Expand Down
4 changes: 2 additions & 2 deletions rts/motoko-rts/src/memory/ic.rs
Original file line number Diff line number Diff line change
Expand Up @@ -14,11 +14,11 @@ use motoko_rts_macros::*;

// Provided by generated code
extern "C" {
- fn get_heap_base() -> u32;
+ fn get_heap_base() -> usize;
pub(crate) fn get_static_roots() -> Value;
}

- pub(crate) unsafe fn get_aligned_heap_base() -> u32 {
+ pub(crate) unsafe fn get_aligned_heap_base() -> usize {
// align to 32 bytes
((get_heap_base() + 31) / 32) * 32
}
Expand Down
Loading

0 comments on commit 716fedb

Please sign in to comment.