Mirror of https://github.com/LadybirdBrowser/ladybird.git (synced 2025-04-20 19:45:12 +00:00)
AK: Remove awkward ByteBuffer construction modes (wrap & adopt)
ByteBuffer previously had a flag that determined whether it owned the bytes inside it or not (m_owned). Owned ByteBuffers would free() on destruction and non-owned ones would not. This was a huge source of confusion and made it hard to reason about lifetimes, since there were no compile-time clues about whether a buffer was owned or non-owned. The adopt mode was used at some point to take over ownership of a random malloc'ed buffer, but nothing was using it, so this patch removes that as well.
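To make the lifetime problem concrete, here is a minimal, hypothetical stand-in for the pre-change design (the real type is AK::ByteBufferImpl; the names and layout below are simplified for illustration only, not code from this commit). Whether the destructor frees anything depends on a runtime flag, so an owning buffer and a dangling alias look identical at compile time:

#include <cstddef>
#include <cstdio>
#include <cstdlib>
#include <cstring>

// Hypothetical, stripped-down sketch of the old shape: one class, one
// runtime flag deciding whether destruction frees the bytes.
struct Buffer {
    unsigned char* data { nullptr };
    std::size_t size { 0 };
    bool owned { false }; // the m_owned-style flag this commit removes

    ~Buffer()
    {
        if (owned) // frees only when this instance "owns" the bytes
            std::free(data);
    }
};

int main()
{
    // "copy" mode: the buffer owns a private allocation and frees it.
    Buffer owned_buffer;
    owned_buffer.data = static_cast<unsigned char*>(std::malloc(4));
    std::memcpy(owned_buffer.data, "abcd", 4);
    owned_buffer.size = 4;
    owned_buffer.owned = true;

    // "wrap" mode: the buffer merely aliases storage it does not own.
    char stack_bytes[4] = { 'w', 'x', 'y', 'z' };
    Buffer wrapped_buffer;
    wrapped_buffer.data = reinterpret_cast<unsigned char*>(stack_bytes);
    wrapped_buffer.size = sizeof(stack_bytes);
    wrapped_buffer.owned = false;

    // Both objects have the same type, so nothing at compile time warns that
    // wrapped_buffer must not outlive stack_bytes; the ownership difference
    // exists only at runtime, which is the confusion the commit eliminates.
    std::printf("%c %c\n", owned_buffer.data[0], wrapped_buffer.data[0]);
    return 0;
}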
This commit is contained in:
parent b30acdb4b7
commit 6b2a178a7f

Notes:
sideshowbarker, 2024-07-19 00:44:14 +09:00
Author: https://github.com/awesomekling
Commit: https://github.com/SerenityOS/serenity/commit/6b2a178a7f3

1 changed file with 5 additions and 57 deletions
AK/ByteBuffer.h
@@ -41,8 +41,6 @@ public:
     static NonnullRefPtr<ByteBufferImpl> create_uninitialized(size_t size);
     static NonnullRefPtr<ByteBufferImpl> create_zeroed(size_t);
     static NonnullRefPtr<ByteBufferImpl> copy(const void*, size_t);
-    static NonnullRefPtr<ByteBufferImpl> wrap(void*, size_t);
-    static NonnullRefPtr<ByteBufferImpl> adopt(void*, size_t);
 
     ~ByteBufferImpl() { clear(); }
 
@@ -50,8 +48,7 @@ public:
     {
         if (!m_data)
             return;
-        if (m_owned)
-            kfree(m_data);
+        kfree(m_data);
         m_data = nullptr;
     }
 
@@ -90,20 +87,12 @@ public:
     void grow(size_t size);
 
 private:
-    enum ConstructionMode {
-        Uninitialized,
-        Copy,
-        Wrap,
-        Adopt
-    };
-    explicit ByteBufferImpl(size_t); // For ConstructionMode=Uninitialized
-    ByteBufferImpl(const void*, size_t, ConstructionMode); // For ConstructionMode=Copy
-    ByteBufferImpl(void*, size_t, ConstructionMode); // For ConstructionMode=Wrap/Adopt
+    explicit ByteBufferImpl(size_t);
+    ByteBufferImpl(const void*, size_t);
     ByteBufferImpl() { }
 
     u8* m_data { nullptr };
     size_t m_size { 0 };
-    bool m_owned { false };
 };
 
 class ByteBuffer {
@@ -134,11 +123,6 @@ public:
     static ByteBuffer create_uninitialized(size_t size) { return ByteBuffer(ByteBufferImpl::create_uninitialized(size)); }
    static ByteBuffer create_zeroed(size_t size) { return ByteBuffer(ByteBufferImpl::create_zeroed(size)); }
     static ByteBuffer copy(const void* data, size_t size) { return ByteBuffer(ByteBufferImpl::copy(data, size)); }
-    // The const version of this method was removed because it was misleading, suggesting copy on write
-    // functionality. If you really need the old behaviour, call ByteBuffer::wrap(const_cast<void*>(data), size)
-    // manually. Writing to such a byte buffer invokes undefined behaviour.
-    static ByteBuffer wrap(void* data, size_t size) { return ByteBuffer(ByteBufferImpl::wrap(data, size)); }
-    static ByteBuffer adopt(void* data, size_t size) { return ByteBuffer(ByteBufferImpl::adopt(data, size)); }
 
     ~ByteBuffer() { clear(); }
     void clear() { m_impl = nullptr; }
@@ -194,17 +178,6 @@ public:
         m_impl->trim(size);
     }
 
-    ByteBuffer slice_view(size_t offset, size_t size) const
-    {
-        if (is_null())
-            return {};
-
-        // I cannot hand you a slice I don't have
-        ASSERT(offset + size <= this->size());
-
-        return wrap(const_cast<u8*>(offset_pointer(offset)), size);
-    }
-
     ByteBuffer slice(size_t offset, size_t size) const
     {
         if (is_null())
@@ -263,35 +236,20 @@ inline ByteBufferImpl::ByteBufferImpl(size_t size)
 {
     if (size != 0)
         m_data = static_cast<u8*>(kmalloc(size));
-    m_owned = true;
 }
 
-inline ByteBufferImpl::ByteBufferImpl(const void* data, size_t size, ConstructionMode mode)
+inline ByteBufferImpl::ByteBufferImpl(const void* data, size_t size)
     : m_size(size)
 {
-    ASSERT(mode == Copy);
     if (size != 0) {
         m_data = static_cast<u8*>(kmalloc(size));
         __builtin_memcpy(m_data, data, size);
     }
-    m_owned = true;
 }
 
-inline ByteBufferImpl::ByteBufferImpl(void* data, size_t size, ConstructionMode mode)
-    : m_data(static_cast<u8*>(data))
-    , m_size(size)
-{
-    if (mode == Adopt) {
-        m_owned = true;
-    } else if (mode == Wrap) {
-        m_owned = false;
-    }
-}
-
 inline void ByteBufferImpl::grow(size_t size)
 {
     ASSERT(size > m_size);
-    ASSERT(m_owned);
     if (size == 0) {
         if (m_data)
             kfree(m_data);
@@ -323,17 +281,7 @@ inline NonnullRefPtr<ByteBufferImpl> ByteBufferImpl::create_zeroed(size_t size)
 
 inline NonnullRefPtr<ByteBufferImpl> ByteBufferImpl::copy(const void* data, size_t size)
 {
-    return ::adopt(*new ByteBufferImpl(data, size, Copy));
-}
-
-inline NonnullRefPtr<ByteBufferImpl> ByteBufferImpl::wrap(void* data, size_t size)
-{
-    return ::adopt(*new ByteBufferImpl(data, size, Wrap));
-}
-
-inline NonnullRefPtr<ByteBufferImpl> ByteBufferImpl::adopt(void* data, size_t size)
-{
-    return ::adopt(*new ByteBufferImpl(data, size, Adopt));
+    return ::adopt(*new ByteBufferImpl(data, size));
 }
 
 inline const LogStream& operator<<(const LogStream& stream, const ByteBuffer& value)
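As a migration sketch (not part of the commit, and assuming it is built inside the SerenityOS tree so <AK/ByteBuffer.h> resolves; the data() accessor is assumed from the surrounding API rather than shown in this diff), a call site that previously reached for the removed wrap()/adopt() factories now copies the bytes and always gets an owning buffer:

#include <AK/ByteBuffer.h>
#include <cstring>

static ByteBuffer make_packet(const void* payload, size_t payload_size)
{
    // Before this change, ByteBuffer::wrap(const_cast<void*>(payload), payload_size)
    // returned a non-owning alias with no compile-time hint that `payload`
    // had to stay alive. The surviving factory copies, so the result owns
    // its storage unconditionally.
    return ByteBuffer::copy(payload, payload_size);
}

int main()
{
    const char greeting[] = "hello";
    auto packet = make_packet(greeting, sizeof(greeting));

    // Scratch space still comes from the unchanged factories.
    auto scratch = ByteBuffer::create_zeroed(packet.size());
    memcpy(scratch.data(), packet.data(), packet.size());
    return 0;
}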