LibGC+Everywhere: Factor out a LibGC from LibJS
Resulting in a massive rename across almost everywhere! Alongside the namespace change, we now have the following names:

* JS::NonnullGCPtr -> GC::Ref
* JS::GCPtr -> GC::Ptr
* JS::HeapFunction -> GC::Function
* JS::CellImpl -> GC::Cell
* JS::Handle -> GC::Root
Parent: ce23efc5f6
Commit: f87041bf3a
Author: https://github.com/shannonbooth
Pull-request: https://github.com/LadybirdBrowser/ladybird/pull/2345
Notes: github-actions[bot], 2024-11-15 13:50:17 +00:00

1722 changed files with 9939 additions and 9906 deletions
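To make the mapping concrete, here is a before/after sketch of embedder code (the class and its members are invented for illustration; only the type names come from the list above):

// Before this commit (hypothetical embedder class):
class Thing : public JS::CellImpl {
    JS::NonnullGCPtr<JS::Object> m_object;   // always non-null
    JS::GCPtr<JS::Object> m_maybe_object;    // may be null
    JS::Handle<JS::Object> m_strong_root;    // keeps its target alive across GC
};

// After this commit, the same members under the new LibGC names:
class Thing : public GC::Cell {
    GC::Ref<JS::Object> m_object;
    GC::Ptr<JS::Object> m_maybe_object;
    GC::Root<JS::Object> m_strong_root;
};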
Libraries/LibGC/BlockAllocator.cpp (new file, 84 lines)
@@ -0,0 +1,84 @@

/*
 * Copyright (c) 2021-2023, Andreas Kling <andreas@ladybird.org>
 *
 * SPDX-License-Identifier: BSD-2-Clause
 */

#include <AK/Platform.h>
#include <AK/Random.h>
#include <AK/Vector.h>
#include <LibGC/BlockAllocator.h>
#include <LibGC/HeapBlock.h>
#include <sys/mman.h>

#ifdef HAS_ADDRESS_SANITIZER
#    include <sanitizer/asan_interface.h>
#    include <sanitizer/lsan_interface.h>
#endif

#if defined(AK_OS_GNU_HURD) || (!defined(MADV_FREE) && !defined(MADV_DONTNEED))
#    define USE_FALLBACK_BLOCK_DEALLOCATION
#endif

namespace GC {

BlockAllocator::~BlockAllocator()
{
    for (auto* block : m_blocks) {
        ASAN_UNPOISON_MEMORY_REGION(block, HeapBlock::block_size);
        if (munmap(block, HeapBlock::block_size) < 0) {
            perror("munmap");
            VERIFY_NOT_REACHED();
        }
    }
}

void* BlockAllocator::allocate_block([[maybe_unused]] char const* name)
{
    if (!m_blocks.is_empty()) {
        // To reduce predictability, take a random block from the cache.
        size_t random_index = get_random_uniform(m_blocks.size());
        auto* block = m_blocks.unstable_take(random_index);
        ASAN_UNPOISON_MEMORY_REGION(block, HeapBlock::block_size);
        LSAN_REGISTER_ROOT_REGION(block, HeapBlock::block_size);
        return block;
    }

    auto* block = (HeapBlock*)mmap(nullptr, HeapBlock::block_size, PROT_READ | PROT_WRITE, MAP_ANONYMOUS | MAP_PRIVATE, -1, 0);
    VERIFY(block != MAP_FAILED);
    LSAN_REGISTER_ROOT_REGION(block, HeapBlock::block_size);
    return block;
}

void BlockAllocator::deallocate_block(void* block)
{
    VERIFY(block);

#if defined(USE_FALLBACK_BLOCK_DEALLOCATION)
    // If we can't use any of the nicer techniques, unmap and remap the block to return the physical pages while keeping the VM.
    if (munmap(block, HeapBlock::block_size) < 0) {
        perror("munmap");
        VERIFY_NOT_REACHED();
    }
    if (mmap(block, HeapBlock::block_size, PROT_READ | PROT_WRITE, MAP_ANONYMOUS | MAP_PRIVATE | MAP_FIXED, -1, 0) != block) {
        perror("mmap");
        VERIFY_NOT_REACHED();
    }
#elif defined(MADV_FREE)
    if (madvise(block, HeapBlock::block_size, MADV_FREE) < 0) {
        perror("madvise(MADV_FREE)");
        VERIFY_NOT_REACHED();
    }
#elif defined(MADV_DONTNEED)
    if (madvise(block, HeapBlock::block_size, MADV_DONTNEED) < 0) {
        perror("madvise(MADV_DONTNEED)");
        VERIFY_NOT_REACHED();
    }
#endif

    ASAN_POISON_MEMORY_REGION(block, HeapBlock::block_size);
    LSAN_UNREGISTER_ROOT_REGION(block, HeapBlock::block_size);
    m_blocks.append(block);
}

}
Libraries/LibGC/BlockAllocator.h (new file, 26 lines)
@@ -0,0 +1,26 @@

/*
 * Copyright (c) 2021-2023, Andreas Kling <andreas@ladybird.org>
 *
 * SPDX-License-Identifier: BSD-2-Clause
 */

#pragma once

#include <AK/Vector.h>
#include <LibGC/Forward.h>

namespace GC {

class BlockAllocator {
public:
    BlockAllocator() = default;
    ~BlockAllocator();

    void* allocate_block(char const* name);
    void deallocate_block(void*);

private:
    Vector<void*> m_blocks;
};

}
Libraries/LibGC/CMakeLists.txt (new file, 14 lines)
@@ -0,0 +1,14 @@

set(SOURCES
    BlockAllocator.cpp
    Cell.cpp
    CellAllocator.cpp
    ConservativeVector.cpp
    Root.cpp
    Heap.cpp
    HeapBlock.cpp
    MarkedVector.cpp
    WeakContainer.cpp
)

serenity_lib(LibGC gc)
target_link_libraries(LibGC PRIVATE LibCore)
Libraries/LibGC/Cell.cpp (new file, 18 lines)
@@ -0,0 +1,18 @@

/*
 * Copyright (c) 2020-2022, Andreas Kling <andreas@ladybird.org>
 *
 * SPDX-License-Identifier: BSD-2-Clause
 */

#include <LibGC/Cell.h>
#include <LibGC/NanBoxedValue.h>

namespace GC {

void GC::Cell::Visitor::visit(NanBoxedValue const& value)
{
    if (value.is_cell())
        visit_impl(value.as_cell());
}

}
Libraries/LibGC/Cell.h (new file, 205 lines)
@@ -0,0 +1,205 @@

/*
 * Copyright (c) 2020-2024, Andreas Kling <andreas@ladybird.org>
 *
 * SPDX-License-Identifier: BSD-2-Clause
 */

#pragma once

#include <AK/Badge.h>
#include <AK/Format.h>
#include <AK/Forward.h>
#include <AK/HashMap.h>
#include <AK/Noncopyable.h>
#include <AK/StringView.h>
#include <AK/Weakable.h>
#include <LibGC/Forward.h>
#include <LibGC/Internals.h>
#include <LibGC/Ptr.h>

namespace GC {

// This instrumentation tells analysis tooling to ignore a potentially mis-wrapped GC-allocated member variable.
// It should only be used when the lifetime of the GC-allocated member is always longer than the object.
#if defined(AK_COMPILER_CLANG)
#    define IGNORE_GC [[clang::annotate("serenity::ignore_gc")]]
#else
#    define IGNORE_GC
#endif

#define GC_CELL(class_, base_class)                \
public:                                            \
    using Base = base_class;                       \
    virtual StringView class_name() const override \
    {                                              \
        return #class_##sv;                        \
    }                                              \
    friend class GC::Heap;

class Cell : public Weakable<Cell> {
    AK_MAKE_NONCOPYABLE(Cell);
    AK_MAKE_NONMOVABLE(Cell);

public:
    virtual ~Cell() = default;

    bool is_marked() const { return m_mark; }
    void set_marked(bool b) { m_mark = b; }

    enum class State : bool {
        Live,
        Dead,
    };

    State state() const { return m_state; }
    void set_state(State state) { m_state = state; }

    virtual StringView class_name() const = 0;

    class Visitor {
    public:
        void visit(Cell* cell)
        {
            if (cell)
                visit_impl(*cell);
        }

        void visit(Cell& cell)
        {
            visit_impl(cell);
        }

        void visit(Cell const* cell)
        {
            visit(const_cast<Cell*>(cell));
        }

        void visit(Cell const& cell)
        {
            visit(const_cast<Cell&>(cell));
        }

        template<typename T>
        void visit(Ptr<T> cell)
        {
            if (cell)
                visit_impl(const_cast<RemoveConst<T>&>(*cell.ptr()));
        }

        template<typename T>
        void visit(Ref<T> cell)
        {
            visit_impl(const_cast<RemoveConst<T>&>(*cell.ptr()));
        }

        template<typename T>
        void visit(ReadonlySpan<T> span)
        {
            for (auto& value : span)
                visit(value);
        }

        template<typename T>
        void visit(Span<T> span)
        {
            for (auto& value : span)
                visit(value);
        }

        template<typename T>
        void visit(Vector<T> const& vector)
        {
            for (auto& value : vector)
                visit(value);
        }

        template<typename T>
        void visit(HashTable<T> const& table)
        {
            for (auto& value : table)
                visit(value);
        }

        template<typename T>
        void visit(OrderedHashTable<T> const& table)
        {
            for (auto& value : table)
                visit(value);
        }

        template<typename K, typename V, typename T>
        void visit(HashMap<K, V, T> const& map)
        {
            for (auto& it : map) {
                if constexpr (requires { visit(it.key); })
                    visit(it.key);
                if constexpr (requires { visit(it.value); })
                    visit(it.value);
            }
        }

        template<typename K, typename V, typename T>
        void visit(OrderedHashMap<K, V, T> const& map)
        {
            for (auto& it : map) {
                if constexpr (requires { visit(it.key); })
                    visit(it.key);
                if constexpr (requires { visit(it.value); })
                    visit(it.value);
            }
        }

        void visit(NanBoxedValue const& value);

        // Allow explicitly ignoring a GC-allocated member in a visit_edges implementation instead
        // of just not using it.
        template<typename T>
        void ignore(T const&)
        {
        }

        virtual void visit_possible_values(ReadonlyBytes) = 0;

    protected:
        virtual void visit_impl(Cell&) = 0;
        virtual ~Visitor() = default;
    };

    virtual void visit_edges(Visitor&) { }

    // This will be called on unmarked objects by the garbage collector in a separate pass before destruction.
    virtual void finalize() { }

    // This allows cells to survive GC by choice, even if nothing points to them.
    // It's used to implement special rules in the web platform.
    // NOTE: Cells must call set_overrides_must_survive_garbage_collection() for this to be honored.
    virtual bool must_survive_garbage_collection() const { return false; }

    bool overrides_must_survive_garbage_collection(Badge<Heap>) const { return m_overrides_must_survive_garbage_collection; }

    ALWAYS_INLINE Heap& heap() const { return HeapBlockBase::from_cell(this)->heap(); }

protected:
    Cell() = default;

    ALWAYS_INLINE void* private_data() const { return bit_cast<HeapBase*>(&heap())->private_data(); }

    void set_overrides_must_survive_garbage_collection(bool b) { m_overrides_must_survive_garbage_collection = b; }

private:
    bool m_mark : 1 { false };
    bool m_overrides_must_survive_garbage_collection : 1 { false };
    State m_state : 1 { State::Live };
};

}

template<>
struct AK::Formatter<GC::Cell> : AK::Formatter<FormatString> {
    ErrorOr<void> format(FormatBuilder& builder, GC::Cell const* cell)
    {
        if (!cell)
            return builder.put_string("Cell{nullptr}"sv);
        return Formatter<FormatString>::format(builder, "{}({})"sv, cell->class_name(), cell);
    }
};
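As a minimal sketch of how a cell type plugs into this API (the class is hypothetical, not part of this commit): GC_CELL supplies Base and class_name(), and visit_edges() reports outgoing GC pointers to the Visitor so the marking pass can follow them.

#include <LibGC/Cell.h>
#include <LibGC/Ptr.h>

class MyThing final : public GC::Cell {
    GC_CELL(MyThing, GC::Cell);

private:
    virtual void visit_edges(Visitor& visitor) override
    {
        Base::visit_edges(visitor); // let the base class report its own edges first
        visitor.visit(m_other);     // Visitor::visit(Ptr<T>) skips null pointers
    }

    GC::Ptr<MyThing> m_other;
};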
Libraries/LibGC/CellAllocator.cpp (new file, 58 lines)
@@ -0,0 +1,58 @@

/*
 * Copyright (c) 2020-2023, Andreas Kling <andreas@ladybird.org>
 *
 * SPDX-License-Identifier: BSD-2-Clause
 */

#include <AK/Badge.h>
#include <LibGC/BlockAllocator.h>
#include <LibGC/CellAllocator.h>
#include <LibGC/Heap.h>
#include <LibGC/HeapBlock.h>

namespace GC {

CellAllocator::CellAllocator(size_t cell_size, char const* class_name)
    : m_class_name(class_name)
    , m_cell_size(cell_size)
{
}

Cell* CellAllocator::allocate_cell(Heap& heap)
{
    if (!m_list_node.is_in_list())
        heap.register_cell_allocator({}, *this);

    if (m_usable_blocks.is_empty()) {
        auto block = HeapBlock::create_with_cell_size(heap, *this, m_cell_size, m_class_name);
        auto block_ptr = reinterpret_cast<FlatPtr>(block.ptr());
        if (m_min_block_address > block_ptr)
            m_min_block_address = block_ptr;
        if (m_max_block_address < block_ptr)
            m_max_block_address = block_ptr;
        m_usable_blocks.append(*block.leak_ptr());
    }

    auto& block = *m_usable_blocks.last();
    auto* cell = block.allocate();
    VERIFY(cell);
    if (block.is_full())
        m_full_blocks.append(*m_usable_blocks.last());
    return cell;
}

void CellAllocator::block_did_become_empty(Badge<Heap>, HeapBlock& block)
{
    block.m_list_node.remove();
    // NOTE: HeapBlocks are managed by the BlockAllocator, so we don't want to `delete` the block here.
    block.~HeapBlock();
    m_block_allocator.deallocate_block(&block);
}

void CellAllocator::block_did_become_usable(Badge<Heap>, HeapBlock& block)
{
    VERIFY(!block.is_full());
    m_usable_blocks.append(block);
}

}
Libraries/LibGC/CellAllocator.h (new file, 83 lines)
@@ -0,0 +1,83 @@

/*
 * Copyright (c) 2020-2023, Andreas Kling <andreas@ladybird.org>
 *
 * SPDX-License-Identifier: BSD-2-Clause
 */

#pragma once

#include <AK/IntrusiveList.h>
#include <AK/NeverDestroyed.h>
#include <AK/NonnullOwnPtr.h>
#include <LibGC/BlockAllocator.h>
#include <LibGC/Forward.h>
#include <LibGC/HeapBlock.h>

#define GC_DECLARE_ALLOCATOR(ClassName) \
    static GC::TypeIsolatingCellAllocator<ClassName> cell_allocator

#define GC_DEFINE_ALLOCATOR(ClassName) \
    GC::TypeIsolatingCellAllocator<ClassName> ClassName::cell_allocator { #ClassName }

namespace GC {

class CellAllocator {
public:
    CellAllocator(size_t cell_size, char const* class_name = nullptr);
    ~CellAllocator() = default;

    size_t cell_size() const { return m_cell_size; }

    Cell* allocate_cell(Heap&);

    template<typename Callback>
    IterationDecision for_each_block(Callback callback)
    {
        for (auto& block : m_full_blocks) {
            if (callback(block) == IterationDecision::Break)
                return IterationDecision::Break;
        }
        for (auto& block : m_usable_blocks) {
            if (callback(block) == IterationDecision::Break)
                return IterationDecision::Break;
        }
        return IterationDecision::Continue;
    }

    void block_did_become_empty(Badge<Heap>, HeapBlock&);
    void block_did_become_usable(Badge<Heap>, HeapBlock&);

    IntrusiveListNode<CellAllocator> m_list_node;
    using List = IntrusiveList<&CellAllocator::m_list_node>;

    BlockAllocator& block_allocator() { return m_block_allocator; }
    FlatPtr min_block_address() const { return m_min_block_address; }
    FlatPtr max_block_address() const { return m_max_block_address; }

private:
    char const* const m_class_name { nullptr };
    size_t const m_cell_size;

    BlockAllocator m_block_allocator;

    using BlockList = IntrusiveList<&HeapBlock::m_list_node>;
    BlockList m_full_blocks;
    BlockList m_usable_blocks;
    FlatPtr m_min_block_address { explode_byte(0xff) };
    FlatPtr m_max_block_address { 0 };
};

template<typename T>
class TypeIsolatingCellAllocator {
public:
    using CellType = T;

    TypeIsolatingCellAllocator(char const* class_name)
        : allocator(sizeof(T), class_name)
    {
    }

    NeverDestroyed<CellAllocator> allocator;
};

}
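A sketch of how the two macros above pair up (the type name is invented): GC_DECLARE_ALLOCATOR goes in the class body, GC_DEFINE_ALLOCATOR in the matching .cpp file, and the resulting TypeIsolatingCellAllocator keeps instances of the type from sharing heap blocks with other types.

// MyThing.h
class MyThing final : public GC::Cell {
    GC_CELL(MyThing, GC::Cell);
    GC_DECLARE_ALLOCATOR(MyThing); // static GC::TypeIsolatingCellAllocator<MyThing> cell_allocator
};

// MyThing.cpp
GC_DEFINE_ALLOCATOR(MyThing); // instantiated with "MyThing" as the allocator's class name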
Libraries/LibGC/ConservativeVector.cpp (new file, 23 lines)
@@ -0,0 +1,23 @@

/*
 * Copyright (c) 2024, Andreas Kling <andreas@ladybird.org>
 *
 * SPDX-License-Identifier: BSD-2-Clause
 */

#include <LibGC/ConservativeVector.h>
#include <LibGC/Heap.h>

namespace GC {

ConservativeVectorBase::ConservativeVectorBase(Heap& heap)
    : m_heap(&heap)
{
    m_heap->did_create_conservative_vector({}, *this);
}

ConservativeVectorBase::~ConservativeVectorBase()
{
    m_heap->did_destroy_conservative_vector({}, *this);
}

}
Libraries/LibGC/ConservativeVector.h (new file, 77 lines)
@@ -0,0 +1,77 @@

/*
 * Copyright (c) 2024, Andreas Kling <andreas@ladybird.org>
 *
 * SPDX-License-Identifier: BSD-2-Clause
 */

#pragma once

#include <AK/HashMap.h>
#include <AK/IntrusiveList.h>
#include <AK/Vector.h>
#include <LibGC/Cell.h>
#include <LibGC/Forward.h>
#include <LibGC/HeapRoot.h>

namespace GC {

class ConservativeVectorBase {
public:
    virtual ReadonlySpan<FlatPtr> possible_values() const = 0;

protected:
    explicit ConservativeVectorBase(Heap&);
    ~ConservativeVectorBase();

    ConservativeVectorBase& operator=(ConservativeVectorBase const&);

    Heap* m_heap { nullptr };
    IntrusiveListNode<ConservativeVectorBase> m_list_node;

public:
    using List = IntrusiveList<&ConservativeVectorBase::m_list_node>;
};

template<typename T, size_t inline_capacity>
class ConservativeVector final
    : public ConservativeVectorBase
    , public Vector<T, inline_capacity> {

public:
    explicit ConservativeVector(Heap& heap)
        : ConservativeVectorBase(heap)
    {
    }

    virtual ~ConservativeVector() = default;

    ConservativeVector(ConservativeVector const& other)
        : ConservativeVectorBase(*other.m_heap)
        , Vector<T, inline_capacity>(other)
    {
    }

    ConservativeVector(ConservativeVector&& other)
        : ConservativeVectorBase(*other.m_heap)
        , Vector<T, inline_capacity>(move(static_cast<Vector<T, inline_capacity>&>(other)))
    {
    }

    ConservativeVector& operator=(ConservativeVector const& other)
    {
        Vector<T, inline_capacity>::operator=(other);
        ConservativeVectorBase::operator=(other);
        return *this;
    }

    virtual ReadonlySpan<FlatPtr> possible_values() const override
    {
        static_assert(sizeof(T) >= sizeof(FlatPtr));
        return ReadonlySpan<FlatPtr> {
            reinterpret_cast<FlatPtr const*>(this->data()),
            this->size() * sizeof(T) / sizeof(FlatPtr),
        };
    }
};

}
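Roughly how this is meant to be used (a sketch with invented variable names, not part of the commit): elements are stored as plain machine words, and during the conservative root scan the heap treats every word returned by possible_values() as a potential cell pointer.

GC::ConservativeVector<FlatPtr> worklist(heap); // 'heap' is an existing GC::Heap
worklist.append(bit_cast<FlatPtr>(some_cell));  // the pointed-to cell now survives GC
                                                // without needing a typed Ref or Root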
Libraries/LibGC/DeferGC.h (new file, 30 lines)
@@ -0,0 +1,30 @@

/*
 * Copyright (c) 2020, Andreas Kling <andreas@ladybird.org>
 *
 * SPDX-License-Identifier: BSD-2-Clause
 */

#pragma once

#include <LibGC/Heap.h>

namespace GC {

class DeferGC {
public:
    explicit DeferGC(Heap& heap)
        : m_heap(heap)
    {
        m_heap.defer_gc();
    }

    ~DeferGC()
    {
        m_heap.undefer_gc();
    }

private:
    Heap& m_heap;
};

}
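A minimal usage sketch (the cell type is hypothetical): while a DeferGC is alive, collect_garbage() only records that a collection was requested, and the deferred collection runs once the scope ends.

{
    GC::DeferGC defer_gc(heap);        // calls heap.defer_gc()
    auto a = heap.allocate<MyThing>(); // no collection can run between
    auto b = heap.allocate<MyThing>(); // these two allocations
} // ~DeferGC() calls heap.undefer_gc(); a deferred collection runs now, if one was requested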
Libraries/LibGC/Forward.h (new file, 32 lines)
@@ -0,0 +1,32 @@

/*
 * Copyright (c) 2024, Shannon Booth <shannon@serenityos.org>
 *
 * SPDX-License-Identifier: BSD-2-Clause
 */

#pragma once

namespace GC {

class Cell;
class CellAllocator;
class DeferGC;
class RootImpl;
class Heap;
class HeapBlock;
class NanBoxedValue;
class WeakContainer;

template<typename T>
class Function;

template<class T>
class Root;

template<class T, size_t inline_capacity = 0>
class ConservativeVector;

template<class T, size_t inline_capacity = 0>
class MarkedVector;

}
Libraries/LibGC/Function.h (new file, 50 lines)
@@ -0,0 +1,50 @@

/*
 * Copyright (c) 2023, Andreas Kling <andreas@ladybird.org>
 *
 * SPDX-License-Identifier: BSD-2-Clause
 */

#pragma once

#include <AK/Function.h>
#include <LibGC/Cell.h>
#include <LibGC/Heap.h>

namespace GC {

template<typename T>
class Function final : public Cell {
    GC_CELL(Function, Cell);

public:
    static Ref<Function> create(Heap& heap, AK::Function<T> function)
    {
        return heap.allocate<Function>(move(function));
    }

    virtual ~Function() override = default;

    [[nodiscard]] AK::Function<T> const& function() const { return m_function; }

private:
    Function(AK::Function<T> function)
        : m_function(move(function))
    {
    }

    virtual void visit_edges(Visitor& visitor) override
    {
        Base::visit_edges(visitor);
        visitor.visit_possible_values(m_function.raw_capture_range());
    }

    AK::Function<T> m_function;
};

template<typename Callable, typename T = EquivalentFunctionType<Callable>>
static Ref<Function<T>> create_function(Heap& heap, Callable&& function)
{
    return Function<T>::create(heap, AK::Function<T> { forward<Callable>(function) });
}

}
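A sketch of the create_function() helper (names invented): GC pointers captured by the lambda survive collection because visit_edges() above hands the raw capture bytes to visit_possible_values(), which scans them conservatively.

GC::Ptr<MyThing> thing = heap.allocate<MyThing>();
auto callback = GC::create_function(heap, [thing] {
    // 'thing' is kept alive for as long as 'callback' itself is reachable.
});
callback->function()(); // invoke the wrapped AK::Function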
Libraries/LibGC/Heap.cpp (new file, 538 lines)
@@ -0,0 +1,538 @@

/*
 * Copyright (c) 2020-2022, Andreas Kling <andreas@ladybird.org>
 * Copyright (c) 2023, Aliaksandr Kalenik <kalenik.aliaksandr@gmail.com>
 *
 * SPDX-License-Identifier: BSD-2-Clause
 */

#include <AK/Badge.h>
#include <AK/Debug.h>
#include <AK/Function.h>
#include <AK/HashTable.h>
#include <AK/JsonArray.h>
#include <AK/JsonObject.h>
#include <AK/Platform.h>
#include <AK/StackInfo.h>
#include <AK/TemporaryChange.h>
#include <LibCore/ElapsedTimer.h>
#include <LibGC/CellAllocator.h>
#include <LibGC/Heap.h>
#include <LibGC/HeapBlock.h>
#include <LibGC/NanBoxedValue.h>
#include <LibGC/Root.h>
#include <setjmp.h>

#ifdef HAS_ADDRESS_SANITIZER
#    include <sanitizer/asan_interface.h>
#endif

namespace GC {

Heap::Heap(void* private_data, AK::Function<void(HashMap<Cell*, GC::HeapRoot>&)> gather_embedder_roots)
    : HeapBase(private_data)
    , m_gather_embedder_roots(move(gather_embedder_roots))
{
    static_assert(HeapBlock::min_possible_cell_size <= 32, "Heap Cell tracking uses too much data!");
    m_size_based_cell_allocators.append(make<CellAllocator>(64));
    m_size_based_cell_allocators.append(make<CellAllocator>(96));
    m_size_based_cell_allocators.append(make<CellAllocator>(128));
    m_size_based_cell_allocators.append(make<CellAllocator>(256));
    m_size_based_cell_allocators.append(make<CellAllocator>(512));
    m_size_based_cell_allocators.append(make<CellAllocator>(1024));
    m_size_based_cell_allocators.append(make<CellAllocator>(3072));
}

Heap::~Heap()
{
    collect_garbage(CollectionType::CollectEverything);
}

void Heap::will_allocate(size_t size)
{
    if (should_collect_on_every_allocation()) {
        m_allocated_bytes_since_last_gc = 0;
        collect_garbage();
    } else if (m_allocated_bytes_since_last_gc + size > m_gc_bytes_threshold) {
        m_allocated_bytes_since_last_gc = 0;
        collect_garbage();
    }

    m_allocated_bytes_since_last_gc += size;
}

static void add_possible_value(HashMap<FlatPtr, HeapRoot>& possible_pointers, FlatPtr data, HeapRoot origin, FlatPtr min_block_address, FlatPtr max_block_address)
{
    if constexpr (sizeof(FlatPtr*) == sizeof(NanBoxedValue)) {
        // Because NanBoxedValue stores pointers in non-canonical form, we have to check whether the top bytes
        // match any pointer-backed tag; in that case we have to extract the pointer to its
        // canonical form and add that as a possible pointer.
        FlatPtr possible_pointer;
        if ((data & SHIFTED_IS_CELL_PATTERN) == SHIFTED_IS_CELL_PATTERN)
            possible_pointer = NanBoxedValue::extract_pointer_bits(data);
        else
            possible_pointer = data;
        if (possible_pointer < min_block_address || possible_pointer > max_block_address)
            return;
        possible_pointers.set(possible_pointer, move(origin));
    } else {
        static_assert((sizeof(NanBoxedValue) % sizeof(FlatPtr*)) == 0);
        if (data < min_block_address || data > max_block_address)
            return;
        // In the 32-bit case we look at the top and bottom parts of NanBoxedValue separately,
        // so we just add both the upper and lower bytes as possible pointers.
        possible_pointers.set(data, move(origin));
    }
}

void Heap::find_min_and_max_block_addresses(FlatPtr& min_address, FlatPtr& max_address)
{
    min_address = explode_byte(0xff);
    max_address = 0;
    for (auto& allocator : m_all_cell_allocators) {
        min_address = min(min_address, allocator.min_block_address());
        max_address = max(max_address, allocator.max_block_address() + HeapBlockBase::block_size);
    }
}

template<typename Callback>
static void for_each_cell_among_possible_pointers(HashTable<HeapBlock*> const& all_live_heap_blocks, HashMap<FlatPtr, HeapRoot>& possible_pointers, Callback callback)
{
    for (auto possible_pointer : possible_pointers.keys()) {
        if (!possible_pointer)
            continue;
        auto* possible_heap_block = HeapBlock::from_cell(reinterpret_cast<Cell const*>(possible_pointer));
        if (!all_live_heap_blocks.contains(possible_heap_block))
            continue;
        if (auto* cell = possible_heap_block->cell_from_possible_pointer(possible_pointer)) {
            callback(cell, possible_pointer);
        }
    }
}

class GraphConstructorVisitor final : public Cell::Visitor {
public:
    explicit GraphConstructorVisitor(Heap& heap, HashMap<Cell*, HeapRoot> const& roots)
        : m_heap(heap)
    {
        m_heap.find_min_and_max_block_addresses(m_min_block_address, m_max_block_address);
        m_heap.for_each_block([&](auto& block) {
            m_all_live_heap_blocks.set(&block);
            return IterationDecision::Continue;
        });

        for (auto& [root, root_origin] : roots) {
            auto& graph_node = m_graph.ensure(bit_cast<FlatPtr>(root));
            graph_node.class_name = root->class_name();
            graph_node.root_origin = root_origin;

            m_work_queue.append(*root);
        }
    }

    virtual void visit_impl(Cell& cell) override
    {
        if (m_node_being_visited)
            m_node_being_visited->edges.set(reinterpret_cast<FlatPtr>(&cell));

        if (m_graph.get(reinterpret_cast<FlatPtr>(&cell)).has_value())
            return;

        m_work_queue.append(cell);
    }

    virtual void visit_possible_values(ReadonlyBytes bytes) override
    {
        HashMap<FlatPtr, HeapRoot> possible_pointers;

        auto* raw_pointer_sized_values = reinterpret_cast<FlatPtr const*>(bytes.data());
        for (size_t i = 0; i < (bytes.size() / sizeof(FlatPtr)); ++i)
            add_possible_value(possible_pointers, raw_pointer_sized_values[i], HeapRoot { .type = HeapRoot::Type::HeapFunctionCapturedPointer }, m_min_block_address, m_max_block_address);

        for_each_cell_among_possible_pointers(m_all_live_heap_blocks, possible_pointers, [&](Cell* cell, FlatPtr) {
            if (m_node_being_visited)
                m_node_being_visited->edges.set(reinterpret_cast<FlatPtr>(cell));

            if (m_graph.get(reinterpret_cast<FlatPtr>(cell)).has_value())
                return;
            m_work_queue.append(*cell);
        });
    }

    void visit_all_cells()
    {
        while (!m_work_queue.is_empty()) {
            auto cell = m_work_queue.take_last();
            m_node_being_visited = &m_graph.ensure(bit_cast<FlatPtr>(cell.ptr()));
            m_node_being_visited->class_name = cell->class_name();
            cell->visit_edges(*this);
            m_node_being_visited = nullptr;
        }
    }

    AK::JsonObject dump()
    {
        auto graph = AK::JsonObject();
        for (auto& it : m_graph) {
            AK::JsonArray edges;
            for (auto const& value : it.value.edges) {
                edges.must_append(ByteString::formatted("{}", value));
            }

            auto node = AK::JsonObject();
            if (it.value.root_origin.has_value()) {
                auto type = it.value.root_origin->type;
                auto location = it.value.root_origin->location;
                switch (type) {
                case HeapRoot::Type::Root:
                    node.set("root"sv, ByteString::formatted("Root {} {}:{}", location->function_name(), location->filename(), location->line_number()));
                    break;
                case HeapRoot::Type::MarkedVector:
                    node.set("root"sv, "MarkedVector");
                    break;
                case HeapRoot::Type::RegisterPointer:
                    node.set("root"sv, "RegisterPointer");
                    break;
                case HeapRoot::Type::StackPointer:
                    node.set("root"sv, "StackPointer");
                    break;
                case HeapRoot::Type::VM:
                    node.set("root"sv, "VM");
                    break;
                default:
                    VERIFY_NOT_REACHED();
                }
            }
            node.set("class_name"sv, it.value.class_name);
            node.set("edges"sv, edges);
            graph.set(ByteString::number(it.key), node);
        }

        return graph;
    }

private:
    struct GraphNode {
        Optional<HeapRoot> root_origin;
        StringView class_name;
        HashTable<FlatPtr> edges {};
    };

    GraphNode* m_node_being_visited { nullptr };
    Vector<Ref<Cell>> m_work_queue;
    HashMap<FlatPtr, GraphNode> m_graph;

    Heap& m_heap;
    HashTable<HeapBlock*> m_all_live_heap_blocks;
    FlatPtr m_min_block_address;
    FlatPtr m_max_block_address;
};

AK::JsonObject Heap::dump_graph()
{
    HashMap<Cell*, HeapRoot> roots;
    gather_roots(roots);
    GraphConstructorVisitor visitor(*this, roots);
    visitor.visit_all_cells();
    return visitor.dump();
}

void Heap::collect_garbage(CollectionType collection_type, bool print_report)
{
    VERIFY(!m_collecting_garbage);
    TemporaryChange change(m_collecting_garbage, true);

    Core::ElapsedTimer collection_measurement_timer;
    if (print_report)
        collection_measurement_timer.start();

    if (collection_type == CollectionType::CollectGarbage) {
        if (m_gc_deferrals) {
            m_should_gc_when_deferral_ends = true;
            return;
        }
        HashMap<Cell*, HeapRoot> roots;
        gather_roots(roots);
        mark_live_cells(roots);
    }
    finalize_unmarked_cells();
    sweep_dead_cells(print_report, collection_measurement_timer);
}

void Heap::gather_roots(HashMap<Cell*, HeapRoot>& roots)
{
    m_gather_embedder_roots(roots);
    gather_conservative_roots(roots);

    for (auto& root : m_roots)
        roots.set(root.cell(), HeapRoot { .type = HeapRoot::Type::Root, .location = &root.source_location() });

    for (auto& vector : m_marked_vectors)
        vector.gather_roots(roots);

    if constexpr (HEAP_DEBUG) {
        dbgln("gather_roots:");
        for (auto* root : roots.keys())
            dbgln(" + {}", root);
    }
}

#ifdef HAS_ADDRESS_SANITIZER
NO_SANITIZE_ADDRESS void Heap::gather_asan_fake_stack_roots(HashMap<FlatPtr, HeapRoot>& possible_pointers, FlatPtr addr, FlatPtr min_block_address, FlatPtr max_block_address)
{
    void* begin = nullptr;
    void* end = nullptr;
    void* real_stack = __asan_addr_is_in_fake_stack(__asan_get_current_fake_stack(), reinterpret_cast<void*>(addr), &begin, &end);

    if (real_stack != nullptr) {
        for (auto* real_stack_addr = reinterpret_cast<void const* const*>(begin); real_stack_addr < end; ++real_stack_addr) {
            void const* real_address = *real_stack_addr;
            if (real_address == nullptr)
                continue;
            add_possible_value(possible_pointers, reinterpret_cast<FlatPtr>(real_address), HeapRoot { .type = HeapRoot::Type::StackPointer }, min_block_address, max_block_address);
        }
    }
}
#else
void Heap::gather_asan_fake_stack_roots(HashMap<FlatPtr, HeapRoot>&, FlatPtr, FlatPtr, FlatPtr)
{
}
#endif

NO_SANITIZE_ADDRESS void Heap::gather_conservative_roots(HashMap<Cell*, HeapRoot>& roots)
{
    FlatPtr dummy;

    dbgln_if(HEAP_DEBUG, "gather_conservative_roots:");

    jmp_buf buf;
    setjmp(buf);

    HashMap<FlatPtr, HeapRoot> possible_pointers;

    auto* raw_jmp_buf = reinterpret_cast<FlatPtr const*>(buf);

    FlatPtr min_block_address, max_block_address;
    find_min_and_max_block_addresses(min_block_address, max_block_address);

    for (size_t i = 0; i < ((size_t)sizeof(buf)) / sizeof(FlatPtr); ++i)
        add_possible_value(possible_pointers, raw_jmp_buf[i], HeapRoot { .type = HeapRoot::Type::RegisterPointer }, min_block_address, max_block_address);

    auto stack_reference = bit_cast<FlatPtr>(&dummy);

    for (FlatPtr stack_address = stack_reference; stack_address < m_stack_info.top(); stack_address += sizeof(FlatPtr)) {
        auto data = *reinterpret_cast<FlatPtr*>(stack_address);
        add_possible_value(possible_pointers, data, HeapRoot { .type = HeapRoot::Type::StackPointer }, min_block_address, max_block_address);
        gather_asan_fake_stack_roots(possible_pointers, data, min_block_address, max_block_address);
    }

    for (auto& vector : m_conservative_vectors) {
        for (auto possible_value : vector.possible_values()) {
            add_possible_value(possible_pointers, possible_value, HeapRoot { .type = HeapRoot::Type::ConservativeVector }, min_block_address, max_block_address);
        }
    }

    HashTable<HeapBlock*> all_live_heap_blocks;
    for_each_block([&](auto& block) {
        all_live_heap_blocks.set(&block);
        return IterationDecision::Continue;
    });

    for_each_cell_among_possible_pointers(all_live_heap_blocks, possible_pointers, [&](Cell* cell, FlatPtr possible_pointer) {
        if (cell->state() == Cell::State::Live) {
            dbgln_if(HEAP_DEBUG, " ?-> {}", (void const*)cell);
            roots.set(cell, *possible_pointers.get(possible_pointer));
        } else {
            dbgln_if(HEAP_DEBUG, " #-> {}", (void const*)cell);
        }
    });
}

class MarkingVisitor final : public Cell::Visitor {
public:
    explicit MarkingVisitor(Heap& heap, HashMap<Cell*, HeapRoot> const& roots)
        : m_heap(heap)
    {
        m_heap.find_min_and_max_block_addresses(m_min_block_address, m_max_block_address);
        m_heap.for_each_block([&](auto& block) {
            m_all_live_heap_blocks.set(&block);
            return IterationDecision::Continue;
        });

        for (auto* root : roots.keys()) {
            visit(root);
        }
    }

    virtual void visit_impl(Cell& cell) override
    {
        if (cell.is_marked())
            return;
        dbgln_if(HEAP_DEBUG, " ! {}", &cell);

        cell.set_marked(true);
        m_work_queue.append(cell);
    }

    virtual void visit_possible_values(ReadonlyBytes bytes) override
    {
        HashMap<FlatPtr, HeapRoot> possible_pointers;

        auto* raw_pointer_sized_values = reinterpret_cast<FlatPtr const*>(bytes.data());
        for (size_t i = 0; i < (bytes.size() / sizeof(FlatPtr)); ++i)
            add_possible_value(possible_pointers, raw_pointer_sized_values[i], HeapRoot { .type = HeapRoot::Type::HeapFunctionCapturedPointer }, m_min_block_address, m_max_block_address);

        for_each_cell_among_possible_pointers(m_all_live_heap_blocks, possible_pointers, [&](Cell* cell, FlatPtr) {
            if (cell->is_marked())
                return;
            if (cell->state() != Cell::State::Live)
                return;
            cell->set_marked(true);
            m_work_queue.append(*cell);
        });
    }

    void mark_all_live_cells()
    {
        while (!m_work_queue.is_empty()) {
            m_work_queue.take_last()->visit_edges(*this);
        }
    }

private:
    Heap& m_heap;
    Vector<Ref<Cell>> m_work_queue;
    HashTable<HeapBlock*> m_all_live_heap_blocks;
    FlatPtr m_min_block_address;
    FlatPtr m_max_block_address;
};

void Heap::mark_live_cells(HashMap<Cell*, HeapRoot> const& roots)
{
    dbgln_if(HEAP_DEBUG, "mark_live_cells:");

    MarkingVisitor visitor(*this, roots);

    visitor.mark_all_live_cells();

    for (auto& inverse_root : m_uprooted_cells)
        inverse_root->set_marked(false);

    m_uprooted_cells.clear();
}

bool Heap::cell_must_survive_garbage_collection(Cell const& cell)
{
    if (!cell.overrides_must_survive_garbage_collection({}))
        return false;
    return cell.must_survive_garbage_collection();
}

void Heap::finalize_unmarked_cells()
{
    for_each_block([&](auto& block) {
        block.template for_each_cell_in_state<Cell::State::Live>([](Cell* cell) {
            if (!cell->is_marked() && !cell_must_survive_garbage_collection(*cell))
                cell->finalize();
        });
        return IterationDecision::Continue;
    });
}

void Heap::sweep_dead_cells(bool print_report, Core::ElapsedTimer const& measurement_timer)
{
    dbgln_if(HEAP_DEBUG, "sweep_dead_cells:");
    Vector<HeapBlock*, 32> empty_blocks;
    Vector<HeapBlock*, 32> full_blocks_that_became_usable;

    size_t collected_cells = 0;
    size_t live_cells = 0;
    size_t collected_cell_bytes = 0;
    size_t live_cell_bytes = 0;

    for_each_block([&](auto& block) {
        bool block_has_live_cells = false;
        bool block_was_full = block.is_full();
        block.template for_each_cell_in_state<Cell::State::Live>([&](Cell* cell) {
            if (!cell->is_marked() && !cell_must_survive_garbage_collection(*cell)) {
                dbgln_if(HEAP_DEBUG, " ~ {}", cell);
                block.deallocate(cell);
                ++collected_cells;
                collected_cell_bytes += block.cell_size();
            } else {
                cell->set_marked(false);
                block_has_live_cells = true;
                ++live_cells;
                live_cell_bytes += block.cell_size();
            }
        });
        if (!block_has_live_cells)
            empty_blocks.append(&block);
        else if (block_was_full != block.is_full())
            full_blocks_that_became_usable.append(&block);
        return IterationDecision::Continue;
    });

    for (auto& weak_container : m_weak_containers)
        weak_container.remove_dead_cells({});

    for (auto* block : empty_blocks) {
        dbgln_if(HEAP_DEBUG, " - HeapBlock empty @ {}: cell_size={}", block, block->cell_size());
        block->cell_allocator().block_did_become_empty({}, *block);
    }

    for (auto* block : full_blocks_that_became_usable) {
        dbgln_if(HEAP_DEBUG, " - HeapBlock usable again @ {}: cell_size={}", block, block->cell_size());
        block->cell_allocator().block_did_become_usable({}, *block);
    }

    if constexpr (HEAP_DEBUG) {
        for_each_block([&](auto& block) {
            dbgln(" > Live HeapBlock @ {}: cell_size={}", &block, block.cell_size());
            return IterationDecision::Continue;
        });
    }

    m_gc_bytes_threshold = live_cell_bytes > GC_MIN_BYTES_THRESHOLD ? live_cell_bytes : GC_MIN_BYTES_THRESHOLD;

    if (print_report) {
        AK::Duration const time_spent = measurement_timer.elapsed_time();
        size_t live_block_count = 0;
        for_each_block([&](auto&) {
            ++live_block_count;
            return IterationDecision::Continue;
        });

        dbgln("Garbage collection report");
        dbgln("=============================================");
        dbgln("     Time spent: {} ms", time_spent.to_milliseconds());
        dbgln("     Live cells: {} ({} bytes)", live_cells, live_cell_bytes);
        dbgln("Collected cells: {} ({} bytes)", collected_cells, collected_cell_bytes);
        dbgln("    Live blocks: {} ({} bytes)", live_block_count, live_block_count * HeapBlock::block_size);
        dbgln("   Freed blocks: {} ({} bytes)", empty_blocks.size(), empty_blocks.size() * HeapBlock::block_size);
        dbgln("=============================================");
    }
}

void Heap::defer_gc()
{
    ++m_gc_deferrals;
}

void Heap::undefer_gc()
{
    VERIFY(m_gc_deferrals > 0);
    --m_gc_deferrals;

    if (!m_gc_deferrals) {
        if (m_should_gc_when_deferral_ends)
            collect_garbage();
        m_should_gc_when_deferral_ends = false;
    }
}

void Heap::uproot_cell(Cell* cell)
{
    m_uprooted_cells.append(cell);
}

}
Libraries/LibGC/Heap.h (new file, 205 lines)
@@ -0,0 +1,205 @@

/*
 * Copyright (c) 2020-2024, Andreas Kling <andreas@ladybird.org>
 *
 * SPDX-License-Identifier: BSD-2-Clause
 */

#pragma once

#include <AK/Badge.h>
#include <AK/Function.h>
#include <AK/HashTable.h>
#include <AK/IntrusiveList.h>
#include <AK/Noncopyable.h>
#include <AK/NonnullOwnPtr.h>
#include <AK/StackInfo.h>
#include <AK/Types.h>
#include <AK/Vector.h>
#include <LibCore/Forward.h>
#include <LibGC/Cell.h>
#include <LibGC/CellAllocator.h>
#include <LibGC/ConservativeVector.h>
#include <LibGC/Forward.h>
#include <LibGC/HeapRoot.h>
#include <LibGC/Internals.h>
#include <LibGC/MarkedVector.h>
#include <LibGC/Root.h>
#include <LibGC/WeakContainer.h>

namespace GC {

class Heap : public HeapBase {
    AK_MAKE_NONCOPYABLE(Heap);
    AK_MAKE_NONMOVABLE(Heap);

public:
    explicit Heap(void* private_data, AK::Function<void(HashMap<Cell*, GC::HeapRoot>&)> gather_embedder_roots);
    ~Heap();

    template<typename T, typename... Args>
    Ref<T> allocate(Args&&... args)
    {
        auto* memory = allocate_cell<T>();
        defer_gc();
        new (memory) T(forward<Args>(args)...);
        undefer_gc();
        return *static_cast<T*>(memory);
    }

    enum class CollectionType {
        CollectGarbage,
        CollectEverything,
    };

    void collect_garbage(CollectionType = CollectionType::CollectGarbage, bool print_report = false);
    AK::JsonObject dump_graph();

    bool should_collect_on_every_allocation() const { return m_should_collect_on_every_allocation; }
    void set_should_collect_on_every_allocation(bool b) { m_should_collect_on_every_allocation = b; }

    void did_create_root(Badge<RootImpl>, RootImpl&);
    void did_destroy_root(Badge<RootImpl>, RootImpl&);

    void did_create_marked_vector(Badge<MarkedVectorBase>, MarkedVectorBase&);
    void did_destroy_marked_vector(Badge<MarkedVectorBase>, MarkedVectorBase&);

    void did_create_conservative_vector(Badge<ConservativeVectorBase>, ConservativeVectorBase&);
    void did_destroy_conservative_vector(Badge<ConservativeVectorBase>, ConservativeVectorBase&);

    void did_create_weak_container(Badge<WeakContainer>, WeakContainer&);
    void did_destroy_weak_container(Badge<WeakContainer>, WeakContainer&);

    void register_cell_allocator(Badge<CellAllocator>, CellAllocator&);

    void uproot_cell(Cell* cell);

private:
    friend class MarkingVisitor;
    friend class GraphConstructorVisitor;
    friend class DeferGC;

    void defer_gc();
    void undefer_gc();

    static bool cell_must_survive_garbage_collection(Cell const&);

    template<typename T>
    Cell* allocate_cell()
    {
        will_allocate(sizeof(T));
        if constexpr (requires { T::cell_allocator.allocator.get().allocate_cell(*this); }) {
            if constexpr (IsSame<T, typename decltype(T::cell_allocator)::CellType>) {
                return T::cell_allocator.allocator.get().allocate_cell(*this);
            }
        }
        return allocator_for_size(sizeof(T)).allocate_cell(*this);
    }

    void will_allocate(size_t);

    void find_min_and_max_block_addresses(FlatPtr& min_address, FlatPtr& max_address);
    void gather_roots(HashMap<Cell*, HeapRoot>&);
    void gather_conservative_roots(HashMap<Cell*, HeapRoot>&);
    void gather_asan_fake_stack_roots(HashMap<FlatPtr, HeapRoot>&, FlatPtr, FlatPtr min_block_address, FlatPtr max_block_address);
    void mark_live_cells(HashMap<Cell*, HeapRoot> const& live_cells);
    void finalize_unmarked_cells();
    void sweep_dead_cells(bool print_report, Core::ElapsedTimer const&);

    ALWAYS_INLINE CellAllocator& allocator_for_size(size_t cell_size)
    {
        // FIXME: Use binary search?
        for (auto& allocator : m_size_based_cell_allocators) {
            if (allocator->cell_size() >= cell_size)
                return *allocator;
        }
        dbgln("Cannot get CellAllocator for cell size {}, largest available is {}!", cell_size, m_size_based_cell_allocators.last()->cell_size());
        VERIFY_NOT_REACHED();
    }

    template<typename Callback>
    void for_each_block(Callback callback)
    {
        for (auto& allocator : m_all_cell_allocators) {
            if (allocator.for_each_block(callback) == IterationDecision::Break)
                return;
        }
    }

    static constexpr size_t GC_MIN_BYTES_THRESHOLD { 4 * 1024 * 1024 };
    size_t m_gc_bytes_threshold { GC_MIN_BYTES_THRESHOLD };
    size_t m_allocated_bytes_since_last_gc { 0 };

    bool m_should_collect_on_every_allocation { false };

    Vector<NonnullOwnPtr<CellAllocator>> m_size_based_cell_allocators;
    CellAllocator::List m_all_cell_allocators;

    RootImpl::List m_roots;
    MarkedVectorBase::List m_marked_vectors;
    ConservativeVectorBase::List m_conservative_vectors;
    WeakContainer::List m_weak_containers;

    Vector<Ptr<Cell>> m_uprooted_cells;

    size_t m_gc_deferrals { 0 };
    bool m_should_gc_when_deferral_ends { false };

    bool m_collecting_garbage { false };
    StackInfo m_stack_info;
    AK::Function<void(HashMap<Cell*, GC::HeapRoot>&)> m_gather_embedder_roots;
};

inline void Heap::did_create_root(Badge<RootImpl>, RootImpl& impl)
{
    VERIFY(!m_roots.contains(impl));
    m_roots.append(impl);
}

inline void Heap::did_destroy_root(Badge<RootImpl>, RootImpl& impl)
{
    VERIFY(m_roots.contains(impl));
    m_roots.remove(impl);
}

inline void Heap::did_create_marked_vector(Badge<MarkedVectorBase>, MarkedVectorBase& vector)
{
    VERIFY(!m_marked_vectors.contains(vector));
    m_marked_vectors.append(vector);
}

inline void Heap::did_destroy_marked_vector(Badge<MarkedVectorBase>, MarkedVectorBase& vector)
{
    VERIFY(m_marked_vectors.contains(vector));
    m_marked_vectors.remove(vector);
}

inline void Heap::did_create_conservative_vector(Badge<ConservativeVectorBase>, ConservativeVectorBase& vector)
{
    VERIFY(!m_conservative_vectors.contains(vector));
    m_conservative_vectors.append(vector);
}

inline void Heap::did_destroy_conservative_vector(Badge<ConservativeVectorBase>, ConservativeVectorBase& vector)
{
    VERIFY(m_conservative_vectors.contains(vector));
    m_conservative_vectors.remove(vector);
}

inline void Heap::did_create_weak_container(Badge<WeakContainer>, WeakContainer& set)
{
    VERIFY(!m_weak_containers.contains(set));
    m_weak_containers.append(set);
}

inline void Heap::did_destroy_weak_container(Badge<WeakContainer>, WeakContainer& set)
{
    VERIFY(m_weak_containers.contains(set));
    m_weak_containers.remove(set);
}

inline void Heap::register_cell_allocator(Badge<CellAllocator>, CellAllocator& allocator)
{
    m_all_cell_allocators.append(allocator);
}

}
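Tying the pieces together, a minimal embedding sketch (the empty root-gathering callback and the cell type are placeholders; a real embedder adds its roots in the callback):

GC::Heap heap(nullptr, [](HashMap<GC::Cell*, GC::HeapRoot>&) {
    // Embedder-specific roots would be added to the map here.
});

GC::Ref<MyThing> thing = heap.allocate<MyThing>();
heap.collect_garbage(); // 'thing' survives: the conservative stack scan sees the local Ref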
Libraries/LibGC/HeapBlock.cpp (new file, 64 lines)
@@ -0,0 +1,64 @@

/*
 * Copyright (c) 2020-2024, Andreas Kling <andreas@ladybird.org>
 *
 * SPDX-License-Identifier: BSD-2-Clause
 */

#include <AK/Assertions.h>
#include <AK/NonnullOwnPtr.h>
#include <AK/Platform.h>
#include <LibGC/Heap.h>
#include <LibGC/HeapBlock.h>
#include <stdio.h>
#include <sys/mman.h>

#ifdef HAS_ADDRESS_SANITIZER
#    include <sanitizer/asan_interface.h>
#endif

namespace GC {

size_t HeapBlockBase::block_size = PAGE_SIZE;

NonnullOwnPtr<HeapBlock> HeapBlock::create_with_cell_size(Heap& heap, CellAllocator& cell_allocator, size_t cell_size, [[maybe_unused]] char const* class_name)
{
    char const* name = nullptr;
    auto* block = static_cast<HeapBlock*>(cell_allocator.block_allocator().allocate_block(name));
    new (block) HeapBlock(heap, cell_allocator, cell_size);
    return NonnullOwnPtr<HeapBlock>(NonnullOwnPtr<HeapBlock>::Adopt, *block);
}

HeapBlock::HeapBlock(Heap& heap, CellAllocator& cell_allocator, size_t cell_size)
    : HeapBlockBase(heap)
    , m_cell_allocator(cell_allocator)
    , m_cell_size(cell_size)
{
    VERIFY(cell_size >= sizeof(FreelistEntry));
    ASAN_POISON_MEMORY_REGION(m_storage, block_size - sizeof(HeapBlock));
}

void HeapBlock::deallocate(Cell* cell)
{
    VERIFY(is_valid_cell_pointer(cell));
    VERIFY(!m_freelist || is_valid_cell_pointer(m_freelist));
    VERIFY(cell->state() == Cell::State::Live);
    VERIFY(!cell->is_marked());

    cell->~Cell();
    auto* freelist_entry = new (cell) FreelistEntry();
    freelist_entry->set_state(Cell::State::Dead);
    freelist_entry->next = m_freelist;
    m_freelist = freelist_entry;

#ifdef HAS_ADDRESS_SANITIZER
    auto dword_after_freelist = round_up_to_power_of_two(reinterpret_cast<uintptr_t>(freelist_entry) + sizeof(FreelistEntry), 8);
    VERIFY((dword_after_freelist - reinterpret_cast<uintptr_t>(freelist_entry)) <= m_cell_size);
    VERIFY(m_cell_size >= sizeof(FreelistEntry));
    // We can't poison the cell tracking data, nor the FreelistEntry's vtable or next pointer.
    // This means there's sizeof(FreelistEntry) data at the front of each cell that is always read/write.
    // On x86_64, this ends up being 24 bytes due to the size of the FreelistEntry's vtable, while on x86, it's only 12 bytes.
    ASAN_POISON_MEMORY_REGION(reinterpret_cast<void*>(dword_after_freelist), m_cell_size - sizeof(FreelistEntry));
#endif
}

}
Libraries/LibGC/HeapBlock.h (new file, 121 lines)
@@ -0,0 +1,121 @@

/*
 * Copyright (c) 2020, Andreas Kling <andreas@ladybird.org>
 *
 * SPDX-License-Identifier: BSD-2-Clause
 */

#pragma once

#include <AK/IntrusiveList.h>
#include <AK/Platform.h>
#include <AK/StringView.h>
#include <AK/Types.h>
#include <LibGC/Cell.h>
#include <LibGC/Forward.h>
#include <LibGC/Internals.h>

#ifdef HAS_ADDRESS_SANITIZER
#    include <sanitizer/asan_interface.h>
#endif

namespace GC {

class HeapBlock : public HeapBlockBase {
    AK_MAKE_NONCOPYABLE(HeapBlock);
    AK_MAKE_NONMOVABLE(HeapBlock);

public:
    using HeapBlockBase::block_size;
    static NonnullOwnPtr<HeapBlock> create_with_cell_size(Heap&, CellAllocator&, size_t cell_size, char const* class_name);

    size_t cell_size() const { return m_cell_size; }
    size_t cell_count() const { return (block_size - sizeof(HeapBlock)) / m_cell_size; }
    bool is_full() const { return !has_lazy_freelist() && !m_freelist; }

    ALWAYS_INLINE Cell* allocate()
    {
        Cell* allocated_cell = nullptr;
        if (m_freelist) {
            VERIFY(is_valid_cell_pointer(m_freelist));
            allocated_cell = exchange(m_freelist, m_freelist->next);
        } else if (has_lazy_freelist()) {
            allocated_cell = cell(m_next_lazy_freelist_index++);
        }

        if (allocated_cell) {
            ASAN_UNPOISON_MEMORY_REGION(allocated_cell, m_cell_size);
        }
        return allocated_cell;
    }

    void deallocate(Cell*);

    template<typename Callback>
    void for_each_cell(Callback callback)
    {
        auto end = has_lazy_freelist() ? m_next_lazy_freelist_index : cell_count();
        for (size_t i = 0; i < end; ++i)
            callback(cell(i));
    }

    template<Cell::State state, typename Callback>
    void for_each_cell_in_state(Callback callback)
    {
        for_each_cell([&](auto* cell) {
            if (cell->state() == state)
                callback(cell);
        });
    }

    static HeapBlock* from_cell(Cell const* cell)
    {
        return static_cast<HeapBlock*>(HeapBlockBase::from_cell(cell));
    }

    Cell* cell_from_possible_pointer(FlatPtr pointer)
    {
        if (pointer < reinterpret_cast<FlatPtr>(m_storage))
            return nullptr;
        size_t cell_index = (pointer - reinterpret_cast<FlatPtr>(m_storage)) / m_cell_size;
        auto end = has_lazy_freelist() ? m_next_lazy_freelist_index : cell_count();
        if (cell_index >= end)
            return nullptr;
        return cell(cell_index);
    }

    bool is_valid_cell_pointer(Cell const* cell)
    {
        return cell_from_possible_pointer((FlatPtr)cell);
    }

    IntrusiveListNode<HeapBlock> m_list_node;

    CellAllocator& cell_allocator() { return m_cell_allocator; }

private:
    HeapBlock(Heap&, CellAllocator&, size_t cell_size);

    bool has_lazy_freelist() const { return m_next_lazy_freelist_index < cell_count(); }

    struct FreelistEntry final : public Cell {
        GC_CELL(FreelistEntry, Cell);

        RawPtr<FreelistEntry> next;
    };

    Cell* cell(size_t index)
    {
        return reinterpret_cast<Cell*>(&m_storage[index * cell_size()]);
    }

    CellAllocator& m_cell_allocator;
    size_t m_cell_size { 0 };
    size_t m_next_lazy_freelist_index { 0 };
    Ptr<FreelistEntry> m_freelist;
    alignas(__BIGGEST_ALIGNMENT__) u8 m_storage[];

public:
    static constexpr size_t min_possible_cell_size = sizeof(FreelistEntry);
};

}
28
Libraries/LibGC/HeapRoot.h
Normal file
28
Libraries/LibGC/HeapRoot.h
Normal file
|
@ -0,0 +1,28 @@
|
|||
/*
 * Copyright (c) 2023, Aliaksandr Kalenik <kalenik.aliaksandr@gmail.com>
 *
 * SPDX-License-Identifier: BSD-2-Clause
 */

#pragma once

#include <AK/SourceLocation.h>

namespace GC {

struct HeapRoot {
    enum class Type {
        HeapFunctionCapturedPointer,
        Root,
        MarkedVector,
        ConservativeVector,
        RegisterPointer,
        StackPointer,
        VM,
    };

    Type type;
    SourceLocation const* location { nullptr };
};

}
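HeapRoot is just a provenance record: when the collector gathers roots, each root cell is mapped to a HeapRoot describing where the reference came from. A minimal sketch of that pattern (the same one MarkedVector::gather_roots uses further down), assuming the AK and LibGC headers are available and some_cell points at a live cell:

// Usage sketch, assuming a populated heap; `some_cell` is hypothetical.
#include <AK/HashMap.h>
#include <LibGC/HeapRoot.h>

void record_root(HashMap<GC::Cell*, GC::HeapRoot>& roots, GC::Cell* some_cell)
{
    // Tag the cell with the kind of root that keeps it alive.
    roots.set(some_cell, GC::HeapRoot { .type = GC::HeapRoot::Type::StackPointer });
}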
53
Libraries/LibGC/Internals.h
Normal file
@@ -0,0 +1,53 @@
/*
 * Copyright (c) 2020-2024, Andreas Kling <andreas@ladybird.org>
 * Copyright (c) 2020-2023, the SerenityOS developers.
 *
 * SPDX-License-Identifier: BSD-2-Clause
 */

#pragma once

#include <AK/Types.h>
#include <LibGC/Forward.h>

namespace GC {

class HeapBase {
    AK_MAKE_NONCOPYABLE(HeapBase);
    AK_MAKE_NONMOVABLE(HeapBase);

public:
    void* private_data() { return m_private_data; }

protected:
    explicit HeapBase(void* private_data)
        : m_private_data(private_data)
    {
    }

    void* m_private_data;
};

class HeapBlockBase {
    AK_MAKE_NONMOVABLE(HeapBlockBase);
    AK_MAKE_NONCOPYABLE(HeapBlockBase);

public:
    static size_t block_size;
    static HeapBlockBase* from_cell(Cell const* cell)
    {
        return reinterpret_cast<HeapBlockBase*>(bit_cast<FlatPtr>(cell) & ~(HeapBlockBase::block_size - 1));
    }

    Heap& heap() { return m_heap; }

protected:
    HeapBlockBase(Heap& heap)
        : m_heap(heap)
    {
    }

    Heap& m_heap;
};

}
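HeapBlockBase::from_cell() relies on every block being allocated at a block_size-aligned address: rounding any interior pointer down to that alignment boundary recovers the owning block header. A standalone demonstration of the masking trick, with a hypothetical power-of-two block size and address:

// Standalone sketch (not part of the diff) of the same masking trick.
#include <cstdint>
#include <cstdio>

int main()
{
    constexpr uintptr_t block_size = 16 * 1024;  // hypothetical; must be a power of two
    uintptr_t cell = 0x7f12'3456'4123;           // hypothetical cell address inside a block
    uintptr_t block = cell & ~(block_size - 1);  // round down to the block boundary
    std::printf("cell %#lx lives in block %#lx\n", (unsigned long)cell, (unsigned long)block);
}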
36
Libraries/LibGC/MarkedVector.cpp
Normal file
@@ -0,0 +1,36 @@
/*
 * Copyright (c) 2021, Andreas Kling <andreas@ladybird.org>
 * Copyright (c) 2022, Linus Groh <linusg@serenityos.org>
 *
 * SPDX-License-Identifier: BSD-2-Clause
 */

#include <LibGC/Heap.h>
#include <LibGC/MarkedVector.h>

namespace GC {

MarkedVectorBase::MarkedVectorBase(Heap& heap)
    : m_heap(&heap)
{
    m_heap->did_create_marked_vector({}, *this);
}

MarkedVectorBase::~MarkedVectorBase()
{
    m_heap->did_destroy_marked_vector({}, *this);
}

MarkedVectorBase& MarkedVectorBase::operator=(MarkedVectorBase const& other)
{
    if (m_heap != other.m_heap) {
        m_heap = other.m_heap;

        // NOTE: IntrusiveList will remove this MarkedVectorBase from the old heap it was part of.
        m_heap->did_create_marked_vector({}, *this);
    }

    return *this;
}

}
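The assignment operator above quietly re-homes a vector between heaps: joining the new heap's intrusive list automatically unlinks the node from the old one. A usage sketch of that behavior, assuming two live GC::Heap instances:

// Usage sketch, assuming the LibGC headers above and two live heaps.
#include <LibGC/MarkedVector.h>

void rehome(GC::Heap& heap_a, GC::Heap& heap_b)
{
    GC::MarkedVector<GC::Cell*, 8> a(heap_a);
    GC::MarkedVector<GC::Cell*, 8> b(heap_b);
    b = a; // b leaves heap_b's list and is now scanned as part of heap_a's roots
}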
81
Libraries/LibGC/MarkedVector.h
Normal file
@@ -0,0 +1,81 @@
/*
 * Copyright (c) 2021, Andreas Kling <andreas@ladybird.org>
 * Copyright (c) 2022, Linus Groh <linusg@serenityos.org>
 *
 * SPDX-License-Identifier: BSD-2-Clause
 */

#pragma once

#include <AK/HashMap.h>
#include <AK/IntrusiveList.h>
#include <AK/Vector.h>
#include <LibGC/Cell.h>
#include <LibGC/Forward.h>
#include <LibGC/HeapRoot.h>

namespace GC {

class MarkedVectorBase {
public:
    virtual void gather_roots(HashMap<Cell*, GC::HeapRoot>&) const = 0;

protected:
    explicit MarkedVectorBase(Heap&);
    ~MarkedVectorBase();

    MarkedVectorBase& operator=(MarkedVectorBase const&);

    Heap* m_heap { nullptr };
    IntrusiveListNode<MarkedVectorBase> m_list_node;

public:
    using List = IntrusiveList<&MarkedVectorBase::m_list_node>;
};

template<typename T, size_t inline_capacity>
class MarkedVector final
    : public MarkedVectorBase
    , public Vector<T, inline_capacity> {

public:
    explicit MarkedVector(Heap& heap)
        : MarkedVectorBase(heap)
    {
    }

    virtual ~MarkedVector() = default;

    MarkedVector(MarkedVector const& other)
        : MarkedVectorBase(*other.m_heap)
        , Vector<T, inline_capacity>(other)
    {
    }

    MarkedVector(MarkedVector&& other)
        : MarkedVectorBase(*other.m_heap)
        , Vector<T, inline_capacity>(move(static_cast<Vector<T, inline_capacity>&>(other)))
    {
    }

    MarkedVector& operator=(MarkedVector const& other)
    {
        Vector<T, inline_capacity>::operator=(other);
        MarkedVectorBase::operator=(other);
        return *this;
    }

    virtual void gather_roots(HashMap<Cell*, GC::HeapRoot>& roots) const override
    {
        for (auto& value : *this) {
            if constexpr (IsBaseOf<NanBoxedValue, T>) {
                if (value.is_cell())
                    roots.set(&const_cast<T&>(value).as_cell(), HeapRoot { .type = HeapRoot::Type::MarkedVector });
            } else {
                roots.set(value, HeapRoot { .type = HeapRoot::Type::MarkedVector });
            }
        }
    }
};

}
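In practice a MarkedVector serves as a scratch buffer whose contents must survive a collection: it registers itself with the heap on construction, reports every element through gather_roots() while alive, and deregisters on destruction. A usage sketch, assuming the headers above and a live GC::Heap:

// Usage sketch, assuming a live heap and two cells owned by it.
#include <LibGC/MarkedVector.h>

void process(GC::Heap& heap, GC::Cell& a, GC::Cell& b)
{
    GC::MarkedVector<GC::Cell*, 32> cells(heap); // registered with the heap
    cells.append(&a);
    cells.append(&b);
    // Allocations here may trigger GC; `a` and `b` are reported as roots.
} // destructor deregisters the vector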
120
Libraries/LibGC/NanBoxedValue.h
Normal file
@@ -0,0 +1,120 @@
/*
 * Copyright (c) 2024, Shannon Booth <shannon@serenityos.org>
 *
 * SPDX-License-Identifier: BSD-2-Clause
 */

#pragma once

#include <AK/BitCast.h>
#include <AK/Types.h>
#include <LibGC/Cell.h>

namespace GC {

static_assert(sizeof(double) == 8);
static_assert(sizeof(void*) == sizeof(double) || sizeof(void*) == sizeof(u32));
// To make our Value representation compact we can use the fact that IEEE
// doubles have a lot (2^52 - 2) of NaN bit patterns. The canonical form is
// just 0x7FF8000000000000, i.e. sign = 0, exponent all ones, and the topmost
// bit of the mantissa set.
static constexpr u64 CANON_NAN_BITS = bit_cast<u64>(__builtin_nan(""));
static_assert(CANON_NAN_BITS == 0x7FF8000000000000);
// (Unfortunately all the other NaN patterns are valid doubles, so we have to
// convert any incoming NaN to this pattern, although in practice it seems only
// the negative version of CANON_NAN_BITS shows up.)
// +/- Infinity are represented by a full exponent but without any bits of the
// mantissa set.
static constexpr u64 POSITIVE_INFINITY_BITS = bit_cast<u64>(__builtin_huge_val());
static constexpr u64 NEGATIVE_INFINITY_BITS = bit_cast<u64>(-__builtin_huge_val());
static_assert(POSITIVE_INFINITY_BITS == 0x7FF0000000000000);
static_assert(NEGATIVE_INFINITY_BITS == 0xFFF0000000000000);
// However, as long as any bit is set in the mantissa alongside an exponent of
// all ones, the value is a NaN, regardless of the sign bit.
// (NOTE: we have to use __builtin_isnan here since some isnan implementations are not constexpr)
static_assert(__builtin_isnan(bit_cast<double>(0x7FF0000000000001)));
static_assert(__builtin_isnan(bit_cast<double>(0xFFF0000000040000)));
// This means we can use all of these NaNs to store all the other options for Value.
// To make sure all of these other representations stay NaNs, we use 0x7FF8 as
// the base top 2 bytes, which ensures the value is always a NaN.
static constexpr u64 BASE_TAG = 0x7FF8;
// This leaves the sign bit and the three lower bits for tagging a value and then
// 48 bits of potential payload.
// First, the pointer-backed types (Object, String, etc.): to signify this
// category and make stack scanning easier, we set the sign bit (topmost bit)
// to 1 for pointer-backed types.
static constexpr u64 IS_CELL_BIT = 0x8000 | BASE_TAG;
// On all current 64-bit systems this code runs on, pointers actually only use
// the lowest 6 bytes, which fits neatly into our NaN payload with the top two
// bytes left over for marking it as a NaN and tagging the type.
// Note that we do need to take care when extracting the pointer value, but this
// is explained in the extract_pointer method.

static constexpr u64 IS_CELL_PATTERN = 0xFFF8ULL;
static constexpr u64 TAG_SHIFT = 48;
static constexpr u64 TAG_EXTRACTION = 0xFFFF000000000000;
static constexpr u64 SHIFTED_IS_CELL_PATTERN = IS_CELL_PATTERN << TAG_SHIFT;

class NanBoxedValue {
public:
    bool is_cell() const { return (m_value.tag & IS_CELL_PATTERN) == IS_CELL_PATTERN; }

    static constexpr FlatPtr extract_pointer_bits(u64 encoded)
    {
#ifdef AK_ARCH_32_BIT
        // For 32-bit systems the pointer fully fits so we can just return it directly.
        static_assert(sizeof(void*) == sizeof(u32));
        return static_cast<FlatPtr>(encoded & 0xffff'ffff);
#elif ARCH(X86_64) || ARCH(RISCV64)
        // For x86_64 and riscv64 the top 16 bits should be sign extending the "real" top bit (47th).
        // So first shift the top 16 bits away, then the arithmetic right shift sign-extends them back.
        return static_cast<FlatPtr>((static_cast<i64>(encoded << 16)) >> 16);
#elif ARCH(AARCH64) || ARCH(PPC64) || ARCH(PPC64LE)
        // For AArch64 the top 16 bits of the pointer should be zero.
        // For PPC64: all 64 bits can be used for pointers, however on Linux only
        // the lower 43 bits are used for user-space addresses, so
        // masking off the top 16 bits should match the rest of LibGC.
        return static_cast<FlatPtr>(encoded & 0xffff'ffff'ffffULL);
#else
#    error "Unknown architecture. Don't know whether pointers need to be sign-extended."
#endif
    }

    template<typename PointerType>
    PointerType* extract_pointer() const
    {
        VERIFY(is_cell());
        return reinterpret_cast<PointerType*>(extract_pointer_bits(m_value.encoded));
    }

    Cell& as_cell()
    {
        VERIFY(is_cell());
        return *extract_pointer<Cell>();
    }

    Cell& as_cell() const
    {
        VERIFY(is_cell());
        return *extract_pointer<Cell>();
    }

    bool is_nan() const
    {
        return m_value.encoded == CANON_NAN_BITS;
    }

protected:
    union {
        double as_double;
        struct {
            u64 payload : 48;
            u64 tag : 16;
        };
        u64 encoded;
    } m_value { .encoded = 0 };
};

static_assert(sizeof(NanBoxedValue) == sizeof(double));

}
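A standalone demonstration of the layout described above: putting 0x7FF8 in the top two bytes forces the exponent to all-ones with a non-zero mantissa, so the resulting double is a NaN no matter which 48-bit payload sits below it:

// Standalone sketch (not part of the diff): pack a 48-bit payload under
// the 0x7FF8 tag and verify the encoded double is a NaN.
#include <cmath>
#include <cstdint>
#include <cstdio>
#include <cstring>

int main()
{
    constexpr uint64_t BASE_TAG = 0x7FF8;
    constexpr uint64_t TAG_SHIFT = 48;

    uint64_t payload = 0x0000'1234'5678;                 // arbitrary 48-bit payload
    uint64_t encoded = (BASE_TAG << TAG_SHIFT) | payload;

    double d;
    std::memcpy(&d, &encoded, sizeof d); // bit_cast equivalent
    std::printf("isnan: %d, payload: %#llx\n",
        std::isnan(d) ? 1 : 0,
        (unsigned long long)(encoded & 0xFFFF'FFFF'FFFFULL));
}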
241
Libraries/LibGC/Ptr.h
Normal file
@@ -0,0 +1,241 @@
/*
 * Copyright (c) 2022, Andreas Kling <andreas@ladybird.org>
 *
 * SPDX-License-Identifier: BSD-2-Clause
 */

#pragma once

#include <AK/Traits.h>
#include <AK/Types.h>

namespace GC {

template<typename T>
class Ptr;

template<typename T>
class Ref {
public:
    Ref() = delete;

    Ref(T& ptr)
        : m_ptr(&ptr)
    {
    }

    template<typename U>
    Ref(U& ptr)
    requires(IsConvertible<U*, T*>)
        : m_ptr(&static_cast<T&>(ptr))
    {
    }

    template<typename U>
    Ref(Ref<U> const& other)
    requires(IsConvertible<U*, T*>)
        : m_ptr(other.ptr())
    {
    }

    template<typename U>
    Ref& operator=(Ref<U> const& other)
    requires(IsConvertible<U*, T*>)
    {
        m_ptr = static_cast<T*>(other.ptr());
        return *this;
    }

    Ref& operator=(T& other)
    {
        m_ptr = &other;
        return *this;
    }

    template<typename U>
    Ref& operator=(U& other)
    requires(IsConvertible<U*, T*>)
    {
        m_ptr = &static_cast<T&>(other);
        return *this;
    }

    RETURNS_NONNULL T* operator->() const { return m_ptr; }

    [[nodiscard]] T& operator*() const { return *m_ptr; }

    RETURNS_NONNULL T* ptr() const { return m_ptr; }

    RETURNS_NONNULL operator T*() const { return m_ptr; }

    operator T&() const { return *m_ptr; }

private:
    T* m_ptr { nullptr };
};

template<typename T>
class Ptr {
public:
    constexpr Ptr() = default;

    Ptr(T& ptr)
        : m_ptr(&ptr)
    {
    }

    Ptr(T* ptr)
        : m_ptr(ptr)
    {
    }

    template<typename U>
    Ptr(Ptr<U> const& other)
    requires(IsConvertible<U*, T*>)
        : m_ptr(other.ptr())
    {
    }

    Ptr(Ref<T> const& other)
        : m_ptr(other.ptr())
    {
    }

    template<typename U>
    Ptr(Ref<U> const& other)
    requires(IsConvertible<U*, T*>)
        : m_ptr(other.ptr())
    {
    }

    Ptr(nullptr_t)
        : m_ptr(nullptr)
    {
    }

    template<typename U>
    Ptr& operator=(Ptr<U> const& other)
    requires(IsConvertible<U*, T*>)
    {
        m_ptr = static_cast<T*>(other.ptr());
        return *this;
    }

    Ptr& operator=(Ref<T> const& other)
    {
        m_ptr = other.ptr();
        return *this;
    }

    template<typename U>
    Ptr& operator=(Ref<U> const& other)
    requires(IsConvertible<U*, T*>)
    {
        m_ptr = static_cast<T*>(other.ptr());
        return *this;
    }

    Ptr& operator=(T& other)
    {
        m_ptr = &other;
        return *this;
    }

    template<typename U>
    Ptr& operator=(U& other)
    requires(IsConvertible<U*, T*>)
    {
        m_ptr = &static_cast<T&>(other);
        return *this;
    }

    Ptr& operator=(T* other)
    {
        m_ptr = other;
        return *this;
    }

    template<typename U>
    Ptr& operator=(U* other)
    requires(IsConvertible<U*, T*>)
    {
        m_ptr = static_cast<T*>(other);
        return *this;
    }

    T* operator->() const
    {
        ASSERT(m_ptr);
        return m_ptr;
    }

    [[nodiscard]] T& operator*() const
    {
        ASSERT(m_ptr);
        return *m_ptr;
    }

    T* ptr() const { return m_ptr; }

    explicit operator bool() const { return !!m_ptr; }
    bool operator!() const { return !m_ptr; }

    operator T*() const { return m_ptr; }

private:
    T* m_ptr { nullptr };
};

// Non-Owning GC::Ptr
template<typename T>
using RawPtr = Ptr<T>;

// Non-Owning Ref
template<typename T>
using RawRef = Ref<T>;

template<typename T, typename U>
inline bool operator==(Ptr<T> const& a, Ptr<U> const& b)
{
    return a.ptr() == b.ptr();
}

template<typename T, typename U>
inline bool operator==(Ptr<T> const& a, Ref<U> const& b)
{
    return a.ptr() == b.ptr();
}

template<typename T, typename U>
inline bool operator==(Ref<T> const& a, Ref<U> const& b)
{
    return a.ptr() == b.ptr();
}

template<typename T, typename U>
inline bool operator==(Ref<T> const& a, Ptr<U> const& b)
{
    return a.ptr() == b.ptr();
}

}

namespace AK {

template<typename T>
struct Traits<GC::Ptr<T>> : public DefaultTraits<GC::Ptr<T>> {
    static unsigned hash(GC::Ptr<T> const& value)
    {
        return Traits<T*>::hash(value.ptr());
    }
};

template<typename T>
struct Traits<GC::Ref<T>> : public DefaultTraits<GC::Ref<T>> {
    static unsigned hash(GC::Ref<T> const& value)
    {
        return Traits<T*>::hash(value.ptr());
    }
};

}
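The two smart pointers differ only in their null contract: Ref<T> can never be null (its default constructor is deleted), while Ptr<T> may be, and both decay implicitly to T*. A usage sketch assuming the headers above:

// Usage sketch, assuming the LibGC headers above.
#include <LibGC/Cell.h>
#include <LibGC/Ptr.h>

void example(GC::Ref<GC::Cell> ref)
{
    GC::Ptr<GC::Cell> maybe = ref; // widening Ref -> Ptr always succeeds
    if (maybe) {
        GC::Ref<GC::Cell> again = *maybe; // re-promote through a non-null reference
        GC::Cell* raw = again;            // implicit decay to a raw pointer
        (void)raw;
    }
}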
25
Libraries/LibGC/Root.cpp
Normal file
@@ -0,0 +1,25 @@
/*
 * Copyright (c) 2020, Andreas Kling <andreas@ladybird.org>
 *
 * SPDX-License-Identifier: BSD-2-Clause
 */

#include <LibGC/Cell.h>
#include <LibGC/Heap.h>
#include <LibGC/Root.h>

namespace GC {

RootImpl::RootImpl(Cell* cell, SourceLocation location)
    : m_cell(cell)
    , m_location(location)
{
    m_cell->heap().did_create_root({}, *this);
}

RootImpl::~RootImpl()
{
    m_cell->heap().did_destroy_root({}, *this);
}

}
169
Libraries/LibGC/Root.h
Normal file
@@ -0,0 +1,169 @@
/*
 * Copyright (c) 2020, Andreas Kling <andreas@ladybird.org>
 *
 * SPDX-License-Identifier: BSD-2-Clause
 */

#pragma once

#include <AK/Badge.h>
#include <AK/IntrusiveList.h>
#include <AK/Noncopyable.h>
#include <AK/RefCounted.h>
#include <AK/RefPtr.h>
#include <AK/SourceLocation.h>
#include <LibGC/Forward.h>
#include <LibGC/Ptr.h>

namespace GC {

class RootImpl : public RefCounted<RootImpl> {
    AK_MAKE_NONCOPYABLE(RootImpl);
    AK_MAKE_NONMOVABLE(RootImpl);

public:
    ~RootImpl();

    Cell* cell() { return m_cell; }
    Cell const* cell() const { return m_cell; }

    SourceLocation const& source_location() const { return m_location; }

private:
    template<class T>
    friend class Root;

    explicit RootImpl(Cell*, SourceLocation location);
    Ptr<Cell> m_cell;
    SourceLocation m_location;

    IntrusiveListNode<RootImpl> m_list_node;

public:
    using List = IntrusiveList<&RootImpl::m_list_node>;
};

template<class T>
class Root {
public:
    Root() = default;

    static Root create(T* cell, SourceLocation location = SourceLocation::current())
    {
        return Root(adopt_ref(*new RootImpl(const_cast<RemoveConst<T>*>(cell), location)));
    }

    Root(T* cell, SourceLocation location = SourceLocation::current())
    {
        if (cell)
            m_impl = adopt_ref(*new RootImpl(cell, location));
    }

    Root(T& cell, SourceLocation location = SourceLocation::current())
        : m_impl(adopt_ref(*new RootImpl(&cell, location)))
    {
    }

    Root(Ptr<T> cell, SourceLocation location = SourceLocation::current())
        : Root(cell.ptr(), location)
    {
    }

    Root(Ref<T> cell, SourceLocation location = SourceLocation::current())
        : Root(*cell, location)
    {
    }

    T* cell() const
    {
        if (!m_impl)
            return nullptr;
        return static_cast<T*>(m_impl->cell());
    }

    T* ptr() const
    {
        return cell();
    }

    bool is_null() const
    {
        return m_impl.is_null();
    }

    T* operator->() const
    {
        return cell();
    }

    [[nodiscard]] T& operator*() const
    {
        return *cell();
    }

    bool operator!() const
    {
        return !cell();
    }
    operator bool() const
    {
        return cell();
    }

    operator T*() const { return cell(); }

private:
    explicit Root(NonnullRefPtr<RootImpl> impl)
        : m_impl(move(impl))
    {
    }

    RefPtr<RootImpl> m_impl;
};

template<class T>
inline Root<T> make_root(T* cell, SourceLocation location = SourceLocation::current())
{
    if (!cell)
        return Root<T> {};
    return Root<T>::create(cell, location);
}

template<class T>
inline Root<T> make_root(T& cell, SourceLocation location = SourceLocation::current())
{
    return Root<T>::create(&cell, location);
}

template<class T>
inline Root<T> make_root(Ptr<T> cell, SourceLocation location = SourceLocation::current())
{
    if (!cell)
        return Root<T> {};
    return Root<T>::create(cell.ptr(), location);
}

template<class T>
inline Root<T> make_root(Ref<T> cell, SourceLocation location = SourceLocation::current())
{
    return Root<T>::create(cell.ptr(), location);
}

}

namespace AK {

template<typename T>
struct Traits<GC::Root<T>> : public DefaultTraits<GC::Root<T>> {
    static unsigned hash(GC::Root<T> const& handle) { return Traits<T>::hash(handle); }
};

namespace Detail {
template<typename T>
inline constexpr bool IsHashCompatible<GC::Root<T>, T> = true;

template<typename T>
inline constexpr bool IsHashCompatible<T, GC::Root<T>> = true;

}
}
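A Root is the owning counterpart to Ptr and Ref: it heap-allocates a ref-counted RootImpl that registers the cell with its heap (see Root.cpp above), so the cell survives collections for as long as any copy of the Root exists. A usage sketch assuming the headers above:

// Usage sketch, assuming a cell owned by a live heap.
#include <LibGC/Root.h>

void keep_alive(GC::Cell& cell)
{
    auto root = GC::make_root(cell); // RootImpl registers with cell.heap()
    // Allocations and collections here cannot free `cell`.
    if (root)
        (void)root.ptr();
} // last Root copy dies -> RootImpl deregisters -> cell is collectable again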
31
Libraries/LibGC/WeakContainer.cpp
Normal file
@@ -0,0 +1,31 @@
/*
 * Copyright (c) 2021, Idan Horowitz <idan.horowitz@serenityos.org>
 *
 * SPDX-License-Identifier: BSD-2-Clause
 */

#include <LibGC/Heap.h>
#include <LibGC/WeakContainer.h>

namespace GC {

WeakContainer::WeakContainer(Heap& heap)
    : m_heap(heap)
{
    m_heap.did_create_weak_container({}, *this);
}

WeakContainer::~WeakContainer()
{
    deregister();
}

void WeakContainer::deregister()
{
    if (!m_registered)
        return;
    m_heap.did_destroy_weak_container({}, *this);
    m_registered = false;
}

}
34
Libraries/LibGC/WeakContainer.h
Normal file
@@ -0,0 +1,34 @@
/*
 * Copyright (c) 2021, Idan Horowitz <idan.horowitz@serenityos.org>
 *
 * SPDX-License-Identifier: BSD-2-Clause
 */

#pragma once

#include <AK/IntrusiveList.h>
#include <LibGC/Forward.h>

namespace GC {

class WeakContainer {
public:
    explicit WeakContainer(Heap&);
    virtual ~WeakContainer();

    virtual void remove_dead_cells(Badge<Heap>) = 0;

protected:
    void deregister();

private:
    bool m_registered { true };
    Heap& m_heap;

    IntrusiveListNode<WeakContainer> m_list_node;

public:
    using List = IntrusiveList<&WeakContainer::m_list_node>;
};

}
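WeakContainer inverts the root-gathering contract: its contents do not keep cells alive; instead the heap calls remove_dead_cells() after sweeping so the container can drop stale pointers. A sketch of a hypothetical subclass, assuming the headers above; the name WeakCellList is invented here, and the Cell::State::Live enumerator is an assumption (Cell::state() itself appears in HeapBlock above):

// Hypothetical subclass sketch, not part of the diff.
#include <AK/Badge.h>
#include <AK/Vector.h>
#include <LibGC/WeakContainer.h>

class WeakCellList final : public GC::WeakContainer {
public:
    explicit WeakCellList(GC::Heap& heap)
        : GC::WeakContainer(heap)
    {
    }

    void add(GC::Cell& cell) { m_cells.append(&cell); }

    virtual void remove_dead_cells(Badge<GC::Heap>) override
    {
        // State::Live is assumed; drop every cell the sweep did not keep.
        m_cells.remove_all_matching([](GC::Cell* cell) { return cell->state() != GC::Cell::State::Live; });
    }

private:
    Vector<GC::Cell*> m_cells; // not reported as roots, hence "weak"
};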