Heap: Rewrite kmalloc to use Option<HeapBlock*> instead of nullable pointers to iterate over the heap
All checks were successful
continuous-integration/drone/push Build is passing
At some point, this should be done inside LinkedList itself, but we have no such thing as break in for_each(): it's iterate over everything or nothing. This also requires operator= in Option, which might also be added to Result in the future.
parent 3a3968b268
commit 4081186b27
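As the commit message says, this traversal really belongs inside LinkedList, but for_each() cannot stop early. Below is a minimal sketch of what an early-exit variant could look like, using only the first()/next() API visible in this diff; the name for_each_until and the bool-returning callback convention are hypothetical, not part of the current code:

    // Hypothetical LinkedList member: visits items in order and stops as soon as
    // the callback returns false. Relies on Option<T*>::operator=, added by this commit.
    template <typename Callback> void for_each_until(Callback callback)
    {
        Option<T*> item = first();
        while (item.has_value())
        {
            T* const current = item.value();
            if (!callback(current)) return; // the "break" that for_each() lacks
            item = next(current);
        }
    }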
@@ -199,45 +199,50 @@ Result<void*> kmalloc(usize size, bool should_scrub)
         heap.append(block);
     }
 
-    HeapBlock* block = heap.expect_first();
-    while (block)
+    Option<HeapBlock*> block = heap.first();
+    while (block.has_value())
     {
+        HeapBlock* const current = block.value();
+
         // Trying to find a free block...
-        if (is_block_free(block))
+        if (is_block_free(current))
         {
-            if (block->full_size < size)
+            if (current->full_size < size)
             {
-                block = heap.next(block).value_or(nullptr);
+                block = heap.next(current);
                 continue;
             }
             break; // We found a free block that's big enough!!
         }
-        auto rc = split(block, size);
+        auto rc = split(current, size);
         if (rc.has_value())
        {
             block = rc.value(); // We managed to get a free block from a larger used block!!
             break;
         }
-        block = heap.next(block).value_or(nullptr);
+        block = heap.next(current);
     }
 
-    if (!block) // No free blocks, let's allocate a new one
+    if (!block.has_value()) // No free blocks, let's allocate a new one
     {
         usize pages = get_pages_for_allocation(size + sizeof(HeapBlock));
-        block = TRY(allocate_pages(pages));
+        HeapBlock* const current = TRY(allocate_pages(pages));
 
-        block->full_size = (pages * ARCH_PAGE_SIZE) - sizeof(HeapBlock);
-        block->magic = BLOCK_MAGIC;
-        block->status = BLOCK_START_MEM | BLOCK_END_MEM;
-        heap.append(block);
+        current->full_size = (pages * ARCH_PAGE_SIZE) - sizeof(HeapBlock);
+        current->magic = BLOCK_MAGIC;
+        current->status = BLOCK_START_MEM | BLOCK_END_MEM;
+        heap.append(current);
+
+        block = current;
     }
 
-    block->req_size = size;
-    block->status |= BLOCK_USED;
+    HeapBlock* const current = block.value();
+
+    current->req_size = size;
+    current->status |= BLOCK_USED;
 
-    if (should_scrub) { memset(get_pointer_from_heap_block(block), KMALLOC_SCRUB_BYTE, size); }
+    if (should_scrub) { memset(get_pointer_from_heap_block(current), KMALLOC_SCRUB_BYTE, size); }
 
-    return get_pointer_from_heap_block(block);
+    return get_pointer_from_heap_block(current);
 }
 
 Result<void> kfree(void* ptr)
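For reference, this is the iteration idiom the new code settles on, isolated from the allocator details (a sketch only; heap, HeapBlock and heap.next() are as shown above, and every reassignment of block relies on the Option assignment operators added later in this commit, plus an implicit HeapBlock* to Option<HeapBlock*> conversion for block = rc.value() and block = current):

    Option<HeapBlock*> block = heap.first();
    while (block.has_value())
    {
        HeapBlock* const current = block.value();
        // ... examine or modify current here ...
        block = heap.next(current); // reseats the Option; no value_or(nullptr) needed
    }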
@@ -361,24 +366,25 @@ void dump_heap_usage()
     }
     usize alloc_total = 0;
     usize alloc_used = 0;
-    HeapBlock* block = heap.expect_first();
-    while (block)
+    auto block = heap.first();
+    while (block.has_value())
     {
-        if (is_block_free(block))
+        HeapBlock* current = block.value();
+        if (is_block_free(current))
         {
-            kdbgln("- Available block (%p), of size %zu (%s%s)", (void*)block, block->full_size,
-                   block->status & BLOCK_START_MEM ? "b" : "-", block->status & BLOCK_END_MEM ? "e" : "-");
-            alloc_total += block->full_size + sizeof(HeapBlock);
+            kdbgln("- Available block (%p), of size %zu (%s%s)", (void*)current, current->full_size,
+                   current->status & BLOCK_START_MEM ? "b" : "-", current->status & BLOCK_END_MEM ? "e" : "-");
+            alloc_total += current->full_size + sizeof(HeapBlock);
         }
         else
         {
-            kdbgln("- Used block (%p), of size %zu, of which %zu bytes are being used (%s%s)", (void*)block,
-                   block->full_size, block->req_size, block->status & BLOCK_START_MEM ? "b" : "-",
-                   block->status & BLOCK_END_MEM ? "e" : "-");
-            alloc_total += block->full_size + sizeof(HeapBlock);
-            alloc_used += block->req_size;
+            kdbgln("- Used block (%p), of size %zu, of which %zu bytes are being used (%s%s)", (void*)current,
+                   current->full_size, current->req_size, current->status & BLOCK_START_MEM ? "b" : "-",
+                   current->status & BLOCK_END_MEM ? "e" : "-");
+            alloc_total += current->full_size + sizeof(HeapBlock);
+            alloc_used += current->req_size;
         }
-        block = heap.next(block).value_or(nullptr);
+        block = heap.next(current);
     }
 
     kdbgln("-- Total memory allocated for heap: %zu bytes", alloc_total);
@@ -130,7 +130,7 @@ template <typename T> class LinkedList
     T* expect_first()
     {
         check(m_start_node);
-        return m_start_node;
+        return (T*)m_start_node;
     }
 
     Option<T*> last()
@@ -141,7 +141,7 @@ template <typename T> class LinkedList
     T* expect_last()
    {
         check(m_end_node);
-        return m_end_node;
+        return (T*)m_end_node;
     }
 
     Option<T*> next(T* item)
@@ -30,6 +30,31 @@ template <typename T> class Option
         if (m_has_value) { m_storage.store_moved_reference(move(other.m_storage.fetch_reference())); }
     }
 
+    Option<T>& operator=(const Option<T>& other)
+    {
+        if (this == &other) return *this;
+
+        if (m_has_value) m_storage.destroy();
+        m_has_value = other.m_has_value;
+
+        if (m_has_value) { m_storage.store_reference(other.m_storage.fetch_reference()); }
+
+        return *this;
+    }
+
+    Option<T>& operator=(Option<T>&& other)
+    {
+        if (this == &other) return *this;
+
+        if (m_has_value) m_storage.destroy();
+        m_has_value = other.m_has_value;
+        other.m_has_value = false;
+
+        if (m_has_value) { m_storage.store_moved_reference(move(other.m_storage.fetch_reference())); }
+
+        return *this;
+    }
+
     Option() : m_has_value(false)
     {
     }
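A quick usage sketch of the new assignment operators (assuming Option's converting constructor from T, which block = rc.value() in kmalloc above suggests exists): copy assignment leaves the source intact, move assignment empties it.

    Option<int> a = 5; // assumed converting constructor from T
    Option<int> b;

    b = a;       // copy assignment: destroys b's old value (if any); a and b both hold 5
    b = move(a); // move assignment: b holds 5, and a.has_value() is now false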