Skip to content

Commit

Permalink
Continuing to refactor
Browse files Browse the repository at this point in the history
  • Loading branch information
jimmyhmiller committed Aug 9, 2024
1 parent f47458e commit e948894
Show file tree
Hide file tree
Showing 4 changed files with 24 additions and 95 deletions.
10 changes: 2 additions & 8 deletions src/gc/compacting.rs
Original file line number Diff line number Diff line change
Expand Up @@ -123,15 +123,10 @@ impl Space {
pointer
}

fn write_object(
&mut self,
segment_offset: usize,
offset: usize,
size: Word,
) -> *const u8 {
fn write_object(&mut self, segment_offset: usize, offset: usize, size: Word) -> *const u8 {
let memory = &mut self.segments[segment_offset].memory;

let mut heap_object = HeapObject::from_untagged(unsafe { memory.as_ptr().add(offset)});
let mut heap_object = HeapObject::from_untagged(unsafe { memory.as_ptr().add(offset) });
heap_object.write_header(size);

heap_object.get_pointer()
Expand Down Expand Up @@ -320,7 +315,6 @@ impl CompactingHeap {
}

unsafe fn copy_using_cheneys_algorithm(&mut self, root: usize) -> usize {

let heap_object = HeapObject::from_tagged(root);

// if the first field is in the to space, we have already
Expand Down
69 changes: 10 additions & 59 deletions src/gc/simple_generation.rs
Original file line number Diff line number Diff line change
Expand Up @@ -42,36 +42,6 @@ struct Space {
segment_size: usize,
}

// Walks every allocated object in a `Space`, yielding a raw pointer to each
// object's first word (the header). Holds a raw `*const Space` rather than a
// borrow; soundness therefore depends on the caller keeping the space alive
// and unmodified for the duration of the iteration.
struct ObjectIterator {
    // The space being walked; dereferenced unsafely on every `next()` call.
    // NOTE(review): assumes the `Space` outlives this iterator — confirm at
    // the construction sites.
    space: *const Space,
    // Index of the segment currently being scanned.
    segment_index: usize,
    // Byte offset of the next object within the current segment.
    offset: usize,
}

impl Iterator for ObjectIterator {
    type Item = *const u8;

    fn next(&mut self) -> Option<Self::Item> {
        let space = unsafe { &*self.space };
        // Consumed all used bytes of the current segment: move to the next
        // segment and restart at its beginning.
        // NOTE(review): this indexes `segments[self.segment_index]` before the
        // length check below, so calling `next()` again after it has returned
        // `None` would panic on the out-of-bounds index — confirm callers stop
        // at the first `None`.
        if self.offset >= space.segments[self.segment_index].offset {
            self.segment_index += 1;
            self.offset = 0;
        }
        // Stepped past the last segment: iteration is complete.
        if self.segment_index == space.segments.len() {
            return None;
        }
        let segment = &space.segments[self.segment_index];
        // A segment with no used bytes ends the iteration — presumably
        // segments are filled strictly in order, so an empty one means no
        // later segment holds data either (verify against the allocator).
        if segment.offset == 0 {
            return None;
        }
        let pointer = unsafe { segment.memory.as_ptr().add(self.offset) };
        // Read the object's first word. The low bit is a flag (size is stored
        // shifted left by one — see `SIZE_SHIFT` in types.rs), so the shift
        // below recovers the payload size in bytes.
        let size = unsafe { *pointer.cast::<usize>() };

        // Advance past this object: payload size plus the 8-byte first word.
        self.offset += (size >> 1) + 8;
        Some(pointer)
    }
}

impl Space {
fn new(segment_size: usize) -> Self {
let space = vec![Segment::new(segment_size)];
Expand All @@ -91,12 +61,7 @@ impl Space {
false
}

fn write_object(
&mut self,
segment_offset: usize,
offset: usize,
size: Word,
) -> *const u8 {
fn write_object(&mut self, segment_offset: usize, offset: usize, size: Word) -> *const u8 {
let memory = &mut self.segments[segment_offset].memory;
let mut heap_object = HeapObject::from_untagged(unsafe { memory.as_ptr().add(offset) });
heap_object.write_header(size);
Expand Down Expand Up @@ -322,21 +287,16 @@ impl SimpleGeneration {
new_roots
}

// TODO: Finish this
unsafe fn copy(&mut self, root: usize) -> usize {
// I could make this check the memory range.
// In the original it does. But I don't think I have to?

let untagged = BuiltInTypes::untag(root);
let pointer = untagged as *mut u8;
let heap_object = HeapObject::from_tagged(root);

if !self.young.contains(pointer) {
if !self.young.contains(heap_object.get_pointer()) {
return root;
}

// If it is marked, we have copied it already
// the first 8 bytes are a tagged forward pointer
let first_field = *(pointer.add(8).cast::<usize>());
// If the first field points into the old generation, we can just return the pointer
// because this is a forwarding pointer.
let first_field = heap_object.get_field(0);
if BuiltInTypes::is_heap_pointer(first_field) {
let untagged_data = BuiltInTypes::untag(first_field);
if !self.young.contains(untagged_data as *const u8) {
Expand All @@ -345,20 +305,16 @@ impl SimpleGeneration {
}
}

let size = *(pointer as *const usize) >> 1;
let data = std::slice::from_raw_parts(pointer as *const u8, size + 8);
let data = heap_object.get_full_object_data();
let new_pointer = self.old.copy_data_to_offset(data);
debug_assert!(new_pointer as usize % 8 == 0, "Pointer is not aligned");
// update header of original object to now be the forwarding pointer
let tagged_new = BuiltInTypes::get_kind(root).tag(new_pointer as isize) as usize;

for (old, young) in self.additional_roots.iter() {
if root == *young {
let untagged = BuiltInTypes::untag(*old);
let object = untagged as *mut u8;
let size: usize = *(object as *const usize) >> 1;

let data = std::slice::from_raw_parts_mut(object.add(8) as *mut usize, size / 8);
let mut object = HeapObject::from_tagged(*old);
let data = object.get_fields_mut();

for datum in data.iter_mut() {
if datum == young {
Expand All @@ -367,12 +323,7 @@ impl SimpleGeneration {
}
}
}
let untagged = BuiltInTypes::untag(root);
let pointer = untagged as *mut u8;
let pointer = pointer.add(8);
*pointer.cast::<usize>() = tagged_new;
let size = *(untagged as *const usize) >> 1;
debug_assert!(size % 8 == 0 && size < 100);
heap_object.write_field(0, tagged_new);
self.copied.push(HeapObject::from_untagged(new_pointer));
tagged_new
}
Expand Down
31 changes: 8 additions & 23 deletions src/runtime.rs
Original file line number Diff line number Diff line change
Expand Up @@ -18,7 +18,7 @@ use crate::{
ir::{StringValue, Value},
CommandLineArguments, Data, Message, __pause,
parser::Parser,
types::BuiltInTypes,
types::{BuiltInTypes, HeapObject},
};

#[derive(Debug, Clone, PartialEq, Eq)]
Expand Down Expand Up @@ -635,23 +635,11 @@ impl Compiler {
BuiltInTypes::HeapObject => {
// TODO: Once I change the setup for heap objects
// I need to figure out what kind of heap object I have
unsafe {
let value = BuiltInTypes::untag(value);
let pointer = value as *const u8;

if pointer as usize % 8 != 0 {
panic!("Not aligned");
}
// get first 8 bytes as size le encoded
let size = *(pointer as *const usize) >> 1;
let pointer = pointer.add(8);
let data = std::slice::from_raw_parts(pointer, size);
// type id is the first 8 bytes of data
let type_id = usize::from_le_bytes(data[0..8].try_into().unwrap());
let type_id = BuiltInTypes::untag(type_id);
let struct_value = self.structs.get_by_id(type_id as usize);
Some(self.get_struct_repr(struct_value?, data[8..].to_vec(), depth + 1)?)
}
let object = HeapObject::from_tagged(value);
// TODO: abstract over this (memory-layout)
let type_id = BuiltInTypes::untag(object.get_field(0));
let struct_value = self.structs.get_by_id(type_id as usize);
Some(self.get_struct_repr(struct_value?, &object.get_fields()[1..], depth + 1)?)
}
}
}
Expand Down Expand Up @@ -750,7 +738,7 @@ impl Compiler {
fn get_struct_repr(
&self,
struct_value: &Struct,
to_vec: Vec<u8>,
fields: &[usize],
depth: usize,
) -> Option<String> {
// It should look like this
Expand All @@ -760,10 +748,7 @@ impl Compiler {
for (index, field) in struct_value.fields.iter().enumerate() {
repr.push_str(field);
repr.push_str(": ");
let value = &to_vec[index * 8..index * 8 + 8];
let mut bytes = [0u8; 8];
bytes.copy_from_slice(value);
let value = usize::from_le_bytes(bytes);
let value = fields[index];
repr.push_str(&self.get_repr(value, depth + 1)?);
if index != struct_value.fields.len() - 1 {
repr.push_str(", ");
Expand Down
9 changes: 4 additions & 5 deletions src/types.rs
Original file line number Diff line number Diff line change
Expand Up @@ -129,8 +129,7 @@ pub struct HeapObject {
tagged: bool,
}

const SIZE_SHIFT : usize = 1;

const SIZE_SHIFT: usize = 1;

// TODO: Implement methods for writing the header of the heap object
// make sure we always use this representation everywhere so we can
Expand Down Expand Up @@ -256,7 +255,7 @@ impl HeapObject {
std::ptr::copy_nonoverlapping(data.as_ptr(), pointer, data.len());
}
}

pub fn get_pointer(&self) -> *const u8 {
let untagged = self.untagged();
untagged as *const u8
Expand All @@ -268,8 +267,8 @@ impl HeapObject {
let pointer = unsafe { pointer.add(arg as usize + Self::header_size() / 8) };
unsafe { *pointer = tagged_new };
}
pub fn get_field(&self, arg: i32) -> usize {

pub fn get_field(&self, arg: usize) -> usize {
let untagged = self.untagged();
let pointer = untagged as *mut usize;
let pointer = unsafe { pointer.add(arg as usize + Self::header_size() / 8) };
Expand Down

0 comments on commit e948894

Please sign in to comment.