mirror of https://github.com/LadybirdBrowser/ladybird.git
synced 2025-04-21 12:05:15 +00:00

Everywhere: Run clang-format

This commit is contained in:
parent 8639d8bc21
commit d26aabff04

Notes:
sideshowbarker 2024-07-17 09:48:50 +09:00
Author: https://github.com/linusg
Commit: https://github.com/SerenityOS/serenity/commit/d26aabff04
Pull-request: https://github.com/SerenityOS/serenity/pull/15654
Reviewed-by: https://github.com/ADKaster
Reviewed-by: https://github.com/alimpfard

140 changed files with 1202 additions and 723 deletions
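The hunks below apply two mechanical changes across the tree: trailing requires-clauses move onto their own line (with the function body expanded onto separate lines), and cv-qualifiers move after the type they qualify (`const T&` becomes `T const&`, `volatile T*` becomes `T volatile*`). A minimal sketch of the new layout, using a hypothetical `Example` struct rather than any file from the diff:

```cpp
#include <cstddef>

template<typename T, size_t Size>
struct Example {
    T values[Size];

    // Old layout (what the removed lines look like):
    //   [[nodiscard]] constexpr T const& last() const requires(Size > 0) { return values[Size - 1]; }

    // New layout: the requires-clause sits on its own line, the body on the following lines.
    [[nodiscard]] constexpr T const& last() const
    requires(Size > 0)
    {
        return values[Size - 1];
    }
};
```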
AK/Array.h (18 changed lines)

@@ -37,8 +37,16 @@ struct Array {
[[nodiscard]] constexpr T const& first() const { return at(0); }
[[nodiscard]] constexpr T& first() { return at(0); }
- [[nodiscard]] constexpr T const& last() const requires(Size > 0) { return at(Size - 1); }
- [[nodiscard]] constexpr T& last() requires(Size > 0) { return at(Size - 1); }
+ [[nodiscard]] constexpr T const& last() const
+ requires(Size > 0)
+ {
+ return at(Size - 1);
+ }
+ [[nodiscard]] constexpr T& last()
+ requires(Size > 0)
+ {
+ return at(Size - 1);
+ }
[[nodiscard]] constexpr bool is_empty() const { return size() == 0; }

@@ -68,7 +76,8 @@ struct Array {
return Size;
}
- [[nodiscard]] constexpr T max() const requires(requires(T x, T y) { x < y; })
+ [[nodiscard]] constexpr T max() const
+ requires(requires(T x, T y) { x < y; })
{
static_assert(Size > 0, "No values to max() over");

@@ -78,7 +87,8 @@ struct Array {
return value;
}
- [[nodiscard]] constexpr T min() const requires(requires(T x, T y) { x > y; })
+ [[nodiscard]] constexpr T min() const
+ requires(requires(T x, T y) { x > y; })
{
static_assert(Size > 0, "No values to min() over");
@@ -13,7 +13,7 @@
# ifndef NDEBUG
# define VERIFY assert
# else
- # define __stringify_helper(x) # x
+ # define __stringify_helper(x) #x
# define __stringify(x) __stringify_helper(x)
extern "C" __attribute__((noreturn)) void ak_verification_failed(char const*);
# define VERIFY(expr) \
AK/Atomic.h (40 changed lines)

@@ -29,25 +29,25 @@ static inline void full_memory_barrier() noexcept
}
template<typename T>
- static inline T atomic_exchange(volatile T* var, T desired, MemoryOrder order = memory_order_seq_cst) noexcept
+ static inline T atomic_exchange(T volatile* var, T desired, MemoryOrder order = memory_order_seq_cst) noexcept
{
return __atomic_exchange_n(var, desired, order);
}
template<typename T, typename V = RemoveVolatile<T>>
- static inline V* atomic_exchange(volatile T** var, V* desired, MemoryOrder order = memory_order_seq_cst) noexcept
+ static inline V* atomic_exchange(T volatile** var, V* desired, MemoryOrder order = memory_order_seq_cst) noexcept
{
return __atomic_exchange_n(var, desired, order);
}
template<typename T, typename V = RemoveVolatile<T>>
- static inline V* atomic_exchange(volatile T** var, std::nullptr_t, MemoryOrder order = memory_order_seq_cst) noexcept
+ static inline V* atomic_exchange(T volatile** var, std::nullptr_t, MemoryOrder order = memory_order_seq_cst) noexcept
{
return __atomic_exchange_n(const_cast<V**>(var), nullptr, order);
}
template<typename T>
- [[nodiscard]] static inline bool atomic_compare_exchange_strong(volatile T* var, T& expected, T desired, MemoryOrder order = memory_order_seq_cst) noexcept
+ [[nodiscard]] static inline bool atomic_compare_exchange_strong(T volatile* var, T& expected, T desired, MemoryOrder order = memory_order_seq_cst) noexcept
{
if (order == memory_order_acq_rel || order == memory_order_release)
return __atomic_compare_exchange_n(var, &expected, desired, false, memory_order_release, memory_order_acquire);

@@ -55,7 +55,7 @@ template<typename T>
}
template<typename T, typename V = RemoveVolatile<T>>
- [[nodiscard]] static inline bool atomic_compare_exchange_strong(volatile T** var, V*& expected, V* desired, MemoryOrder order = memory_order_seq_cst) noexcept
+ [[nodiscard]] static inline bool atomic_compare_exchange_strong(T volatile** var, V*& expected, V* desired, MemoryOrder order = memory_order_seq_cst) noexcept
{
if (order == memory_order_acq_rel || order == memory_order_release)
return __atomic_compare_exchange_n(var, &expected, desired, false, memory_order_release, memory_order_acquire);

@@ -63,7 +63,7 @@ template<typename T, typename V = RemoveVolatile<T>>
}
template<typename T, typename V = RemoveVolatile<T>>
- [[nodiscard]] static inline bool atomic_compare_exchange_strong(volatile T** var, V*& expected, std::nullptr_t, MemoryOrder order = memory_order_seq_cst) noexcept
+ [[nodiscard]] static inline bool atomic_compare_exchange_strong(T volatile** var, V*& expected, std::nullptr_t, MemoryOrder order = memory_order_seq_cst) noexcept
{
if (order == memory_order_acq_rel || order == memory_order_release)
return __atomic_compare_exchange_n(const_cast<V**>(var), &expected, nullptr, false, memory_order_release, memory_order_acquire);

@@ -71,67 +71,67 @@ template<typename T, typename V = RemoveVolatile<T>>
}
template<typename T>
- static inline T atomic_fetch_add(volatile T* var, T val, MemoryOrder order = memory_order_seq_cst) noexcept
+ static inline T atomic_fetch_add(T volatile* var, T val, MemoryOrder order = memory_order_seq_cst) noexcept
{
return __atomic_fetch_add(var, val, order);
}
template<typename T>
- static inline T atomic_fetch_sub(volatile T* var, T val, MemoryOrder order = memory_order_seq_cst) noexcept
+ static inline T atomic_fetch_sub(T volatile* var, T val, MemoryOrder order = memory_order_seq_cst) noexcept
{
return __atomic_fetch_sub(var, val, order);
}
template<typename T>
- static inline T atomic_fetch_and(volatile T* var, T val, MemoryOrder order = memory_order_seq_cst) noexcept
+ static inline T atomic_fetch_and(T volatile* var, T val, MemoryOrder order = memory_order_seq_cst) noexcept
{
return __atomic_fetch_and(var, val, order);
}
template<typename T>
- static inline T atomic_fetch_or(volatile T* var, T val, MemoryOrder order = memory_order_seq_cst) noexcept
+ static inline T atomic_fetch_or(T volatile* var, T val, MemoryOrder order = memory_order_seq_cst) noexcept
{
return __atomic_fetch_or(var, val, order);
}
template<typename T>
- static inline T atomic_fetch_xor(volatile T* var, T val, MemoryOrder order = memory_order_seq_cst) noexcept
+ static inline T atomic_fetch_xor(T volatile* var, T val, MemoryOrder order = memory_order_seq_cst) noexcept
{
return __atomic_fetch_xor(var, val, order);
}
template<typename T>
- static inline T atomic_load(volatile T* var, MemoryOrder order = memory_order_seq_cst) noexcept
+ static inline T atomic_load(T volatile* var, MemoryOrder order = memory_order_seq_cst) noexcept
{
return __atomic_load_n(var, order);
}
template<typename T, typename V = RemoveVolatile<T>>
- static inline V* atomic_load(volatile T** var, MemoryOrder order = memory_order_seq_cst) noexcept
+ static inline V* atomic_load(T volatile** var, MemoryOrder order = memory_order_seq_cst) noexcept
{
return __atomic_load_n(const_cast<V**>(var), order);
}
template<typename T>
- static inline void atomic_store(volatile T* var, T desired, MemoryOrder order = memory_order_seq_cst) noexcept
+ static inline void atomic_store(T volatile* var, T desired, MemoryOrder order = memory_order_seq_cst) noexcept
{
__atomic_store_n(var, desired, order);
}
template<typename T, typename V = RemoveVolatile<T>>
- static inline void atomic_store(volatile T** var, V* desired, MemoryOrder order = memory_order_seq_cst) noexcept
+ static inline void atomic_store(T volatile** var, V* desired, MemoryOrder order = memory_order_seq_cst) noexcept
{
__atomic_store_n(var, desired, order);
}
template<typename T, typename V = RemoveVolatile<T>>
- static inline void atomic_store(volatile T** var, std::nullptr_t, MemoryOrder order = memory_order_seq_cst) noexcept
+ static inline void atomic_store(T volatile** var, std::nullptr_t, MemoryOrder order = memory_order_seq_cst) noexcept
{
__atomic_store_n(const_cast<V**>(var), nullptr, order);
}
template<typename T>
- static inline bool atomic_is_lock_free(volatile T* ptr = nullptr) noexcept
+ static inline bool atomic_is_lock_free(T volatile* ptr = nullptr) noexcept
{
return __atomic_is_lock_free(sizeof(T), ptr);
}

@@ -156,7 +156,7 @@ public:
{
}
- volatile T* ptr() noexcept
+ T volatile* ptr() noexcept
{
return &m_value;
}

@@ -225,7 +225,7 @@ public:
{
}
- volatile T* ptr() noexcept
+ T volatile* ptr() noexcept
{
return &m_value;
}

@@ -356,7 +356,7 @@ public:
{
}
- volatile T** ptr() noexcept
+ T volatile** ptr() noexcept
{
return &m_value;
}
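Note that the AK/Atomic.h hunks only move the `volatile` qualifier: `volatile T*` and `T volatile*` name the same type, so the change is purely a formatting convention. A standalone compile-time check (not part of the diff) that confirms the equivalence:

```cpp
#include <type_traits>

// Both spellings denote "pointer to volatile int" / "reference to const int".
static_assert(std::is_same_v<volatile int*, int volatile*>);
static_assert(std::is_same_v<const int&, int const&>);
```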
@@ -52,13 +52,13 @@ public:
return m_elements[index].value;
}
- [[nodiscard]] const V& peek_min() const
+ [[nodiscard]] V const& peek_min() const
{
VERIFY(!is_empty());
return m_elements[0].value;
}
- [[nodiscard]] const K& peek_min_key() const
+ [[nodiscard]] K const& peek_min_key() const
{
VERIFY(!is_empty());
return m_elements[0].key;
@@ -11,7 +11,7 @@
namespace AK {
template<typename T, typename U>
- [[nodiscard]] constexpr inline T bit_cast(const U& a)
+ [[nodiscard]] constexpr inline T bit_cast(U const& a)
{
#if (__has_builtin(__builtin_bit_cast))
return __builtin_bit_cast(T, a);
@@ -27,9 +27,9 @@ namespace AK::Format::Detail {
template<typename T, size_t Size>
struct Array {
constexpr static size_t size() { return Size; }
- constexpr const T& operator[](size_t index) const { return __data[index]; }
+ constexpr T const& operator[](size_t index) const { return __data[index]; }
constexpr T& operator[](size_t index) { return __data[index]; }
- using ConstIterator = SimpleIterator<const Array, const T>;
+ using ConstIterator = SimpleIterator<const Array, T const>;
using Iterator = SimpleIterator<Array, T>;

constexpr ConstIterator begin() const { return ConstIterator::begin(*this); }

@@ -155,7 +155,8 @@ struct CheckedFormatString {
}
template<typename T>
- CheckedFormatString(const T& unchecked_fmt) requires(requires(T t) { StringView { t }; })
+ CheckedFormatString(T const& unchecked_fmt)
+ requires(requires(T t) { StringView { t }; })
: m_string(unchecked_fmt)
{
}
@@ -63,11 +63,11 @@ public:
return value;
}
- const T& at(size_t index) const { return elements()[(m_head + index) % Capacity]; }
+ T const& at(size_t index) const { return elements()[(m_head + index) % Capacity]; }
T& at(size_t index) { return elements()[(m_head + index) % Capacity]; }
- const T& first() const { return at(0); }
- const T& last() const { return at(size() - 1); }
+ T const& first() const { return at(0); }
+ T const& last() const { return at(size() - 1); }
class ConstIterator {
public:

@@ -78,7 +78,7 @@ public:
return *this;
}
- const T& operator*() const { return m_queue.at(m_index); }
+ T const& operator*() const { return m_queue.at(m_index); }
private:
friend class CircularQueue;

@@ -123,7 +123,7 @@ public:
protected:
T* elements() { return reinterpret_cast<T*>(m_storage); }
- const T* elements() const { return reinterpret_cast<const T*>(m_storage); }
+ T const* elements() const { return reinterpret_cast<T const*>(m_storage); }
friend class ConstIterator;
alignas(T) u8 m_storage[sizeof(T) * Capacity];
AK/Complex.h (26 changed lines)

@@ -69,7 +69,7 @@ public:
}
template<AK::Concepts::Arithmetic U>
- constexpr Complex<T>& operator=(const U& x)
+ constexpr Complex<T>& operator=(U const& x)
{
m_real = x;
m_imag = 0;

@@ -85,7 +85,7 @@ public:
}
template<AK::Concepts::Arithmetic U>
- constexpr Complex<T> operator+=(const U& x)
+ constexpr Complex<T> operator+=(U const& x)
{
m_real += x.real();
return *this;

@@ -100,7 +100,7 @@ public:
}
template<AK::Concepts::Arithmetic U>
- constexpr Complex<T> operator-=(const U& x)
+ constexpr Complex<T> operator-=(U const& x)
{
m_real -= x.real();
return *this;

@@ -116,7 +116,7 @@ public:
}
template<AK::Concepts::Arithmetic U>
- constexpr Complex<T> operator*=(const U& x)
+ constexpr Complex<T> operator*=(U const& x)
{
m_real *= x;
m_imag *= x;

@@ -134,7 +134,7 @@ public:
}
template<AK::Concepts::Arithmetic U>
- constexpr Complex<T> operator/=(const U& x)
+ constexpr Complex<T> operator/=(U const& x)
{
m_real /= x;
m_imag /= x;

@@ -150,7 +150,7 @@ public:
}
template<AK::Concepts::Arithmetic U>
- constexpr Complex<T> operator+(const U& a)
+ constexpr Complex<T> operator+(U const& a)
{
Complex<T> x = *this;
x += a;

@@ -166,7 +166,7 @@ public:
}
template<AK::Concepts::Arithmetic U>
- constexpr Complex<T> operator-(const U& a)
+ constexpr Complex<T> operator-(U const& a)
{
Complex<T> x = *this;
x -= a;

@@ -182,7 +182,7 @@ public:
}
template<AK::Concepts::Arithmetic U>
- constexpr Complex<T> operator*(const U& a)
+ constexpr Complex<T> operator*(U const& a)
{
Complex<T> x = *this;
x *= a;

@@ -198,7 +198,7 @@ public:
}
template<AK::Concepts::Arithmetic U>
- constexpr Complex<T> operator/(const U& a)
+ constexpr Complex<T> operator/(U const& a)
{
Complex<T> x = *this;
x /= a;

@@ -228,7 +228,7 @@ private:
// reverse associativity operators for scalars
template<AK::Concepts::Arithmetic T, AK::Concepts::Arithmetic U>
- constexpr Complex<T> operator+(const U& b, Complex<T> const& a)
+ constexpr Complex<T> operator+(U const& b, Complex<T> const& a)
{
Complex<T> x = a;
x += b;

@@ -236,7 +236,7 @@ constexpr Complex<T> operator+(const U& b, Complex<T> const& a)
}
template<AK::Concepts::Arithmetic T, AK::Concepts::Arithmetic U>
- constexpr Complex<T> operator-(const U& b, Complex<T> const& a)
+ constexpr Complex<T> operator-(U const& b, Complex<T> const& a)
{
Complex<T> x = a;
x -= b;

@@ -244,7 +244,7 @@ constexpr Complex<T> operator-(const U& b, Complex<T> const& a)
}
template<AK::Concepts::Arithmetic T, AK::Concepts::Arithmetic U>
- constexpr Complex<T> operator*(const U& b, Complex<T> const& a)
+ constexpr Complex<T> operator*(U const& b, Complex<T> const& a)
{
Complex<T> x = a;
x *= b;

@@ -252,7 +252,7 @@ constexpr Complex<T> operator*(const U& b, Complex<T> const& a)
}
template<AK::Concepts::Arithmetic T, AK::Concepts::Arithmetic U>
- constexpr Complex<T> operator/(const U& b, Complex<T> const& a)
+ constexpr Complex<T> operator/(U const& b, Complex<T> const& a)
{
Complex<T> x = a;
x /= b;
@@ -59,8 +59,16 @@ struct DisjointIterator {
return &other.m_chunks == &m_chunks && other.m_index_in_chunk == m_index_in_chunk && other.m_chunk_index == m_chunk_index;
}
- auto& operator*() requires(!IsConst) { return m_chunks[m_chunk_index][m_index_in_chunk]; }
- auto* operator->() requires(!IsConst) { return &m_chunks[m_chunk_index][m_index_in_chunk]; }
+ auto& operator*()
+ requires(!IsConst)
+ {
+ return m_chunks[m_chunk_index][m_index_in_chunk];
+ }
+ auto* operator->()
+ requires(!IsConst)
+ {
+ return &m_chunks[m_chunk_index][m_index_in_chunk];
+ }
auto const& operator*() const { return m_chunks[m_chunk_index][m_index_in_chunk]; }
auto const* operator->() const { return &m_chunks[m_chunk_index][m_index_in_chunk]; }
@@ -75,7 +75,7 @@ public:
VERIFY(m_head);
return m_head->value;
}
- [[nodiscard]] const T& first() const
+ [[nodiscard]] T const& first() const
{
VERIFY(m_head);
return m_head->value;

@@ -85,7 +85,7 @@ public:
VERIFY(m_head);
return m_tail->value;
}
- [[nodiscard]] const T& last() const
+ [[nodiscard]] T const& last() const
{
VERIFY(m_head);
return m_tail->value;

@@ -148,7 +148,7 @@ public:
}
#endif
- [[nodiscard]] bool contains_slow(const T& value) const
+ [[nodiscard]] bool contains_slow(T const& value) const
{
return find(value) != end();
}

@@ -158,17 +158,17 @@ public:
Iterator begin() { return Iterator(m_head); }
Iterator end() { return Iterator::universal_end(); }
- using ConstIterator = DoublyLinkedListIterator<const DoublyLinkedList, const T>;
+ using ConstIterator = DoublyLinkedListIterator<const DoublyLinkedList, T const>;
friend ConstIterator;
ConstIterator begin() const { return ConstIterator(m_head); }
ConstIterator end() const { return ConstIterator::universal_end(); }
- ConstIterator find(const T& value) const
+ ConstIterator find(T const& value) const
{
return AK::find(begin(), end(), value);
}
- Iterator find(const T& value)
+ Iterator find(T const& value)
{
return AK::find(begin(), end(), value);
}
@@ -75,13 +75,15 @@ private:
template<typename T, typename ErrorType>
class [[nodiscard]] ErrorOr {
public:
- ErrorOr() requires(IsSame<T, Empty>)
+ ErrorOr()
+ requires(IsSame<T, Empty>)
: m_value_or_error(Empty {})
{
}
template<typename U>
- ALWAYS_INLINE ErrorOr(U&& value) requires(!IsSame<RemoveCVReference<U>, ErrorOr<T, ErrorType>>)
+ ALWAYS_INLINE ErrorOr(U&& value)
+ requires(!IsSame<RemoveCVReference<U>, ErrorOr<T, ErrorType>>)
: m_value_or_error(forward<U>(value))
{
}
@@ -30,7 +30,8 @@ template<typename TEndIterator, IteratorPairWith<TEndIterator> TIterator, typena
}
template<typename TEndIterator, IteratorPairWith<TEndIterator> TIterator, typename T>
- [[nodiscard]] constexpr size_t find_index(TIterator first, TEndIterator last, T const& value) requires(requires(TIterator it) { it.index(); })
+ [[nodiscard]] constexpr size_t find_index(TIterator first, TEndIterator last, T const& value)
+ requires(requires(TIterator it) { it.index(); })
{
return find_if(first, last, [&](auto const& v) { return Traits<T>::equals(value, v); }).index();
}
@@ -157,12 +157,14 @@ public:
return y;
}
- constexpr bool signbit() const requires(IsSigned<Underlying>)
+ constexpr bool signbit() const
+ requires(IsSigned<Underlying>)
{
return m_value >> (sizeof(Underlying) * 8 - 1);
}
- constexpr This operator-() const requires(IsSigned<Underlying>)
+ constexpr This operator-() const
+ requires(IsSigned<Underlying>)
{
return create_raw(-m_value);
}
@@ -90,7 +90,7 @@ public:
static const size_t mantissabits = M;
template<typename T>
- requires(IsIntegral<T>&& IsUnsigned<T> && sizeof(T) <= 8) constexpr FloatingPointBits(T bits)
+ requires(IsIntegral<T> && IsUnsigned<T> && sizeof(T) <= 8) constexpr FloatingPointBits(T bits)
: m_bits(bits)
{
}

@@ -105,8 +105,16 @@ public:
{
}
- double as_double() const requires(S == 1 && E == 11 && M == 52) { return bit_cast<double>(m_bits); }
- float as_float() const requires(S == 1 && E == 8 && M == 23) { return bit_cast<float>(static_cast<u32>(m_bits)); }
+ double as_double() const
+ requires(S == 1 && E == 11 && M == 52)
+ {
+ return bit_cast<double>(m_bits);
+ }
+ float as_float() const
+ requires(S == 1 && E == 8 && M == 23)
+ {
+ return bit_cast<float>(static_cast<u32>(m_bits));
+ }
u64 bits() const { return m_bits; }
private:
@@ -20,7 +20,7 @@ namespace AK {
// at https://nigeltao.github.io/blog/2020/eisel-lemire.html
template<typename T>
- concept ParseableFloatingPoint = IsFloatingPoint<T> &&(sizeof(T) == sizeof(u32) || sizeof(T) == sizeof(u64));
+ concept ParseableFloatingPoint = IsFloatingPoint<T> && (sizeof(T) == sizeof(u32) || sizeof(T) == sizeof(u64));
template<ParseableFloatingPoint T>
struct FloatingPointInfo {
AK/Format.h (14 changed lines)

@@ -239,13 +239,13 @@ private:
class TypeErasedFormatParams {
public:
- Span<const TypeErasedParameter> parameters() const { return m_parameters; }
+ Span<TypeErasedParameter const> parameters() const { return m_parameters; }
- void set_parameters(Span<const TypeErasedParameter> parameters) { m_parameters = parameters; }
+ void set_parameters(Span<TypeErasedParameter const> parameters) { m_parameters = parameters; }
size_t take_next_index() { return m_next_index++; }
private:
- Span<const TypeErasedParameter> m_parameters;
+ Span<TypeErasedParameter const> m_parameters;
size_t m_next_index { 0 };
};

@@ -255,7 +255,7 @@ ErrorOr<void> __format_value(TypeErasedFormatParams& params, FormatBuilder& buil
Formatter<T> formatter;
formatter.parse(params, parser);
- return formatter.format(builder, *static_cast<const T*>(value));
+ return formatter.format(builder, *static_cast<T const*>(value));
}
template<typename... Parameters>

@@ -624,15 +624,15 @@ void critical_dmesgln(CheckedFormatString<Parameters...>&& fmt, Parameters const
template<typename T>
class FormatIfSupported {
public:
- explicit FormatIfSupported(const T& value)
+ explicit FormatIfSupported(T const& value)
: m_value(value)
{
}
- const T& value() const { return m_value; }
+ T const& value() const { return m_value; }
private:
- const T& m_value;
+ T const& m_value;
};
template<typename T, bool Supported = false>
struct __FormatIfSupported : Formatter<StringView> {
@@ -64,13 +64,15 @@ public:
}
template<typename CallableType>
- Function(CallableType&& callable) requires((IsFunctionObject<CallableType> && IsCallableWithArguments<CallableType, In...> && !IsSame<RemoveCVReference<CallableType>, Function>))
+ Function(CallableType&& callable)
+ requires((IsFunctionObject<CallableType> && IsCallableWithArguments<CallableType, In...> && !IsSame<RemoveCVReference<CallableType>, Function>))
{
init_with_callable(forward<CallableType>(callable));
}
template<typename FunctionType>
- Function(FunctionType f) requires((IsFunctionPointer<FunctionType> && IsCallableWithArguments<RemovePointer<FunctionType>, In...> && !IsSame<RemoveCVReference<FunctionType>, Function>))
+ Function(FunctionType f)
+ requires((IsFunctionPointer<FunctionType> && IsCallableWithArguments<RemovePointer<FunctionType>, In...> && !IsSame<RemoveCVReference<FunctionType>, Function>))
{
init_with_callable(move(f));
}

@@ -96,7 +98,8 @@ public:
explicit operator bool() const { return !!callable_wrapper(); }
template<typename CallableType>
- Function& operator=(CallableType&& callable) requires((IsFunctionObject<CallableType> && IsCallableWithArguments<CallableType, In...>))
+ Function& operator=(CallableType&& callable)
+ requires((IsFunctionObject<CallableType> && IsCallableWithArguments<CallableType, In...>))
{
clear();
init_with_callable(forward<CallableType>(callable));

@@ -104,7 +107,8 @@ public:
}
template<typename FunctionType>
- Function& operator=(FunctionType f) requires((IsFunctionPointer<FunctionType> && IsCallableWithArguments<RemovePointer<FunctionType>, In...>))
+ Function& operator=(FunctionType f)
+ requires((IsFunctionPointer<FunctionType> && IsCallableWithArguments<RemovePointer<FunctionType>, In...>))
{
clear();
if (f)
@@ -70,7 +70,7 @@ public:
}
template<typename T>
- constexpr bool consume_specific(const T& next)
+ constexpr bool consume_specific(T const& next)
{
if (!next_is(next))
return false;
AK/HashMap.h (33 changed lines)

@@ -48,14 +48,14 @@ public:
void clear() { m_table.clear(); }
void clear_with_capacity() { m_table.clear_with_capacity(); }
- HashSetResult set(const K& key, const V& value) { return m_table.set({ key, value }); }
- HashSetResult set(const K& key, V&& value) { return m_table.set({ key, move(value) }); }
+ HashSetResult set(K const& key, V const& value) { return m_table.set({ key, value }); }
+ HashSetResult set(K const& key, V&& value) { return m_table.set({ key, move(value) }); }
HashSetResult set(K&& key, V&& value) { return m_table.set({ move(key), move(value) }); }
- ErrorOr<HashSetResult> try_set(const K& key, const V& value) { return m_table.try_set({ key, value }); }
- ErrorOr<HashSetResult> try_set(const K& key, V&& value) { return m_table.try_set({ key, move(value) }); }
+ ErrorOr<HashSetResult> try_set(K const& key, V const& value) { return m_table.try_set({ key, value }); }
+ ErrorOr<HashSetResult> try_set(K const& key, V&& value) { return m_table.try_set({ key, move(value) }); }
ErrorOr<HashSetResult> try_set(K&& key, V&& value) { return m_table.try_set({ move(key), move(value) }); }
- bool remove(const K& key)
+ bool remove(K const& key)
{
auto it = find(key);
if (it != end()) {

@@ -90,7 +90,7 @@ public:
[[nodiscard]] IteratorType begin() { return m_table.begin(); }
[[nodiscard]] IteratorType end() { return m_table.end(); }
- [[nodiscard]] IteratorType find(const K& key)
+ [[nodiscard]] IteratorType find(K const& key)
{
return m_table.find(KeyTraits::hash(key), [&](auto& entry) { return KeyTraits::equals(key, entry.key); });
}

@@ -102,7 +102,7 @@ public:
[[nodiscard]] ConstIteratorType begin() const { return m_table.begin(); }
[[nodiscard]] ConstIteratorType end() const { return m_table.end(); }
- [[nodiscard]] ConstIteratorType find(const K& key) const
+ [[nodiscard]] ConstIteratorType find(K const& key) const
{
return m_table.find(KeyTraits::hash(key), [&](auto& entry) { return KeyTraits::equals(key, entry.key); });
}

@@ -127,7 +127,8 @@ public:
void ensure_capacity(size_t capacity) { m_table.ensure_capacity(capacity); }
ErrorOr<void> try_ensure_capacity(size_t capacity) { return m_table.try_ensure_capacity(capacity); }
- Optional<typename Traits<V>::ConstPeekType> get(const K& key) const requires(!IsPointer<typename Traits<V>::PeekType>)
+ Optional<typename Traits<V>::ConstPeekType> get(K const& key) const
+ requires(!IsPointer<typename Traits<V>::PeekType>)
{
auto it = find(key);
if (it == end())

@@ -135,7 +136,8 @@ public:
return (*it).value;
}
- Optional<typename Traits<V>::ConstPeekType> get(const K& key) const requires(IsPointer<typename Traits<V>::PeekType>)
+ Optional<typename Traits<V>::ConstPeekType> get(K const& key) const
+ requires(IsPointer<typename Traits<V>::PeekType>)
{
auto it = find(key);
if (it == end())

@@ -143,7 +145,8 @@ public:
return (*it).value;
}
- Optional<typename Traits<V>::PeekType> get(const K& key) requires(!IsConst<typename Traits<V>::PeekType>)
+ Optional<typename Traits<V>::PeekType> get(K const& key)
+ requires(!IsConst<typename Traits<V>::PeekType>)
{
auto it = find(key);
if (it == end())

@@ -153,7 +156,8 @@ public:
template<Concepts::HashCompatible<K> Key>
requires(IsSame<KeyTraits, Traits<K>>) Optional<typename Traits<V>::PeekType> get(Key const& key)
- const requires(!IsPointer<typename Traits<V>::PeekType>)
+ const
+ requires(!IsPointer<typename Traits<V>::PeekType>)
{
auto it = find(key);
if (it == end())

@@ -163,7 +167,8 @@ public:
template<Concepts::HashCompatible<K> Key>
requires(IsSame<KeyTraits, Traits<K>>) Optional<typename Traits<V>::ConstPeekType> get(Key const& key)
- const requires(IsPointer<typename Traits<V>::PeekType>)
+ const
+ requires(IsPointer<typename Traits<V>::PeekType>)
{
auto it = find(key);
if (it == end())

@@ -181,7 +186,7 @@ public:
return (*it).value;
}
- [[nodiscard]] bool contains(const K& key) const
+ [[nodiscard]] bool contains(K const& key) const
{
return find(key) != end();
}

@@ -197,7 +202,7 @@ public:
m_table.remove(it);
}
- V& ensure(const K& key)
+ V& ensure(K const& key)
{
auto it = find(key);
if (it != end())
@@ -115,7 +115,7 @@ class HashTable {
alignas(T) u8 storage[sizeof(T)];
T* slot() { return reinterpret_cast<T*>(storage); }
- const T* slot() const { return reinterpret_cast<const T*>(storage); }
+ T const* slot() const { return reinterpret_cast<T const*>(storage); }
};
struct OrderedBucket {

@@ -124,7 +124,7 @@ class HashTable {
BucketState state;
alignas(T) u8 storage[sizeof(T)];
T* slot() { return reinterpret_cast<T*>(storage); }
- const T* slot() const { return reinterpret_cast<const T*>(storage); }
+ T const* slot() const { return reinterpret_cast<T const*>(storage); }
};
using BucketType = Conditional<IsOrdered, OrderedBucket, Bucket>;

@@ -265,8 +265,8 @@ public:
}
using ConstIterator = Conditional<IsOrdered,
- OrderedHashTableIterator<const HashTable, const T, const BucketType>,
- HashTableIterator<const HashTable, const T, const BucketType>>;
+ OrderedHashTableIterator<const HashTable, const T, BucketType const>,
+ HashTableIterator<const HashTable, const T, BucketType const>>;
[[nodiscard]] ConstIterator begin() const
{

@@ -389,7 +389,7 @@ public:
return find(Traits<K>::hash(value), move(predicate));
}
- bool remove(const T& value)
+ bool remove(T const& value)
{
auto it = find(value);
if (it != end()) {
@@ -127,7 +127,7 @@ public:
Iterator begin_from(K key) { return Iterator(static_cast<TreeNode*>(BaseTree::find(this->m_root, key))); }
Iterator begin_from(V& value) { return Iterator(&(value.*member)); }
- using ConstIterator = BaseIterator<const V>;
+ using ConstIterator = BaseIterator<V const>;
ConstIterator begin() const { return ConstIterator(static_cast<TreeNode*>(this->m_minimum)); }
ConstIterator end() const { return {}; }
ConstIterator begin_from(K key) const { return ConstIterator(static_cast<TreeNode*>(BaseTree::find(this->m_root, key))); }
@@ -43,7 +43,8 @@ constexpr void const* bitap_bitwise(void const* haystack, size_t haystack_length
}
template<typename HaystackIterT>
- inline Optional<size_t> memmem(HaystackIterT const& haystack_begin, HaystackIterT const& haystack_end, Span<const u8> needle) requires(requires { (*haystack_begin).data(); (*haystack_begin).size(); })
+ inline Optional<size_t> memmem(HaystackIterT const& haystack_begin, HaystackIterT const& haystack_end, Span<u8 const> needle)
+ requires(requires { (*haystack_begin).data(); (*haystack_begin).size(); })
{
auto prepare_kmp_partial_table = [&] {
Vector<int, 64> table;

@@ -122,7 +123,7 @@ inline Optional<size_t> memmem_optional(void const* haystack, size_t haystack_le
}
// Fallback to KMP.
- Array<Span<const u8>, 1> spans { Span<const u8> { (u8 const*)haystack, haystack_length } };
+ Array<Span<u8 const>, 1> spans { Span<u8 const> { (u8 const*)haystack, haystack_length } };
return memmem(spans.begin(), spans.end(), { (u8 const*)needle, needle_length });
}
@@ -26,13 +26,13 @@ public:
~NeverDestroyed() = default;
T* operator->() { return &get(); }
- const T* operator->() const { return &get(); }
+ T const* operator->() const { return &get(); }
T& operator*() { return get(); }
- const T& operator*() const { return get(); }
+ T const& operator*() const { return get(); }
T& get() { return reinterpret_cast<T&>(storage); }
- const T& get() const { return reinterpret_cast<T&>(storage); }
+ T const& get() const { return reinterpret_cast<T&>(storage); }
private:
alignas(T) u8 storage[sizeof(T)];
@@ -164,7 +164,7 @@ inline NonnullOwnPtr<T> make(Args&&... args)
template<typename T>
struct Traits<NonnullOwnPtr<T>> : public GenericTraits<NonnullOwnPtr<T>> {
using PeekType = T*;
- using ConstPeekType = const T*;
+ using ConstPeekType = T const*;
static unsigned hash(NonnullOwnPtr<T> const& p) { return ptr_hash((FlatPtr)p.ptr()); }
static bool equals(NonnullOwnPtr<T> const& a, NonnullOwnPtr<T> const& b) { return a.ptr() == b.ptr(); }
};

@@ -176,10 +176,10 @@ inline void swap(NonnullOwnPtr<T>& a, NonnullOwnPtr<U>& b)
}
template<typename T>
- struct Formatter<NonnullOwnPtr<T>> : Formatter<const T*> {
+ struct Formatter<NonnullOwnPtr<T>> : Formatter<T const*> {
ErrorOr<void> format(FormatBuilder& builder, NonnullOwnPtr<T> const& value)
{
- return Formatter<const T*>::format(builder, value.ptr());
+ return Formatter<T const*>::format(builder, value.ptr());
}
};
@@ -56,7 +56,8 @@ public:
}
template<typename U>
- ALWAYS_INLINE NonnullRefPtr(U const& object) requires(IsConvertible<U*, T*>)
+ ALWAYS_INLINE NonnullRefPtr(U const& object)
+ requires(IsConvertible<U*, T*>)
: m_ptr(const_cast<T*>(static_cast<T const*>(&object)))
{
m_ptr->ref();

@@ -73,7 +74,8 @@ public:
}
template<typename U>
- ALWAYS_INLINE NonnullRefPtr(NonnullRefPtr<U>&& other) requires(IsConvertible<U*, T*>)
+ ALWAYS_INLINE NonnullRefPtr(NonnullRefPtr<U>&& other)
+ requires(IsConvertible<U*, T*>)
: m_ptr(static_cast<T*>(&other.leak_ref()))
{
}

@@ -85,7 +87,8 @@ public:
}
template<typename U>
- ALWAYS_INLINE NonnullRefPtr(NonnullRefPtr<U> const& other) requires(IsConvertible<U*, T*>)
+ ALWAYS_INLINE NonnullRefPtr(NonnullRefPtr<U> const& other)
+ requires(IsConvertible<U*, T*>)
: m_ptr(const_cast<T*>(static_cast<T const*>(other.ptr())))
{
m_ptr->ref();

@@ -120,7 +123,8 @@ public:
}
template<typename U>
- NonnullRefPtr& operator=(NonnullRefPtr<U> const& other) requires(IsConvertible<U*, T*>)
+ NonnullRefPtr& operator=(NonnullRefPtr<U> const& other)
+ requires(IsConvertible<U*, T*>)
{
NonnullRefPtr tmp { other };
swap(tmp);

@@ -135,7 +139,8 @@ public:
}
template<typename U>
- NonnullRefPtr& operator=(NonnullRefPtr<U>&& other) requires(IsConvertible<U*, T*>)
+ NonnullRefPtr& operator=(NonnullRefPtr<U>&& other)
+ requires(IsConvertible<U*, T*>)
{
NonnullRefPtr tmp { move(other) };
swap(tmp);

@@ -190,7 +195,8 @@ public:
}
template<typename U>
- void swap(NonnullRefPtr<U>& other) requires(IsConvertible<U*, T*>)
+ void swap(NonnullRefPtr<U>& other)
+ requires(IsConvertible<U*, T*>)
{
AK::swap(m_ptr, other.m_ptr);
}

@@ -198,7 +204,11 @@ public:
bool operator==(NonnullRefPtr const& other) const { return m_ptr == other.m_ptr; }
template<typename RawPtr>
- bool operator==(RawPtr other) const requires(IsPointer<RawPtr>) { return m_ptr == other; }
+ bool operator==(RawPtr other) const
+ requires(IsPointer<RawPtr>)
+ {
+ return m_ptr == other;
+ }
// clang-format off
private:

@@ -229,7 +239,8 @@ struct Formatter<NonnullRefPtr<T>> : Formatter<T const*> {
};
template<typename T, typename U>
- inline void swap(NonnullRefPtr<T>& a, NonnullRefPtr<U>& b) requires(IsConvertible<U*, T*>)
+ inline void swap(NonnullRefPtr<T>& a, NonnullRefPtr<U>& b)
+ requires(IsConvertible<U*, T*>)
{
a.swap(b);
}
@@ -38,23 +38,33 @@ public:
ALWAYS_INLINE Optional() = default;
#ifdef AK_HAS_CONDITIONALLY_TRIVIAL
- Optional(Optional const& other) requires(!IsCopyConstructible<T>) = delete;
+ Optional(Optional const& other)
+ requires(!IsCopyConstructible<T>)
+ = delete;
Optional(Optional const& other) = default;
- Optional(Optional&& other) requires(!IsMoveConstructible<T>) = delete;
+ Optional(Optional&& other)
+ requires(!IsMoveConstructible<T>)
+ = delete;
- Optional& operator=(Optional const&) requires(!IsCopyConstructible<T> || !IsDestructible<T>) = delete;
+ Optional& operator=(Optional const&)
+ requires(!IsCopyConstructible<T> || !IsDestructible<T>)
+ = delete;
Optional& operator=(Optional const&) = default;
- Optional& operator=(Optional&& other) requires(!IsMoveConstructible<T> || !IsDestructible<T>) = delete;
+ Optional& operator=(Optional&& other)
+ requires(!IsMoveConstructible<T> || !IsDestructible<T>)
+ = delete;
- ~Optional() requires(!IsDestructible<T>) = delete;
+ ~Optional()
+ requires(!IsDestructible<T>)
+ = delete;
~Optional() = default;
#endif
ALWAYS_INLINE Optional(Optional const& other)
#ifdef AK_HAS_CONDITIONALLY_TRIVIAL
- requires(!IsTriviallyCopyConstructible<T>)
+ requires(!IsTriviallyCopyConstructible<T>)
#endif
: m_has_value(other.m_has_value)
{

@@ -78,7 +88,7 @@ public:
}
template<typename U>
- requires(IsConstructible<T, U&&> && !IsSpecializationOf<T, Optional> && !IsSpecializationOf<U, Optional>) ALWAYS_INLINE explicit Optional(Optional<U>&& other)
+ requires(IsConstructible<T, U &&> && !IsSpecializationOf<T, Optional> && !IsSpecializationOf<U, Optional>) ALWAYS_INLINE explicit Optional(Optional<U>&& other)
: m_has_value(other.m_has_value)
{
if (other.has_value())

@@ -86,7 +96,8 @@ public:
}
template<typename U = T>
- ALWAYS_INLINE explicit(!IsConvertible<U&&, T>) Optional(U&& value) requires(!IsSame<RemoveCVReference<U>, Optional<T>> && IsConstructible<T, U&&>)
+ ALWAYS_INLINE explicit(!IsConvertible<U&&, T>) Optional(U&& value)
+ requires(!IsSame<RemoveCVReference<U>, Optional<T>> && IsConstructible<T, U &&>)
: m_has_value(true)
{
new (&m_storage) T(forward<U>(value));

@@ -94,7 +105,7 @@ public:
ALWAYS_INLINE Optional& operator=(Optional const& other)
#ifdef AK_HAS_CONDITIONALLY_TRIVIAL
- requires(!IsTriviallyCopyConstructible<T> || !IsTriviallyDestructible<T>)
+ requires(!IsTriviallyCopyConstructible<T> || !IsTriviallyDestructible<T>)
#endif
{
if (this != &other) {

@@ -133,7 +144,7 @@ public:
ALWAYS_INLINE ~Optional()
#ifdef AK_HAS_CONDITIONALLY_TRIVIAL
- requires(!IsTriviallyDestructible<T>)
+ requires(!IsTriviallyDestructible<T>)
#endif
{
clear();

@@ -222,7 +233,8 @@ public:
ALWAYS_INLINE Optional() = default;
template<typename U = T>
- ALWAYS_INLINE Optional(U& value) requires(CanBePlacedInOptional<U&>)
+ ALWAYS_INLINE Optional(U& value)
+ requires(CanBePlacedInOptional<U&>)
: m_pointer(&value)
{
}

@@ -244,13 +256,15 @@ public:
}
template<typename U>
- ALWAYS_INLINE Optional(Optional<U> const& other) requires(CanBePlacedInOptional<U>)
+ ALWAYS_INLINE Optional(Optional<U> const& other)
+ requires(CanBePlacedInOptional<U>)
: m_pointer(other.m_pointer)
{
}
template<typename U>
- ALWAYS_INLINE Optional(Optional<U>&& other) requires(CanBePlacedInOptional<U>)
+ ALWAYS_INLINE Optional(Optional<U>&& other)
+ requires(CanBePlacedInOptional<U>)
: m_pointer(other.m_pointer)
{
other.m_pointer = nullptr;

@@ -270,14 +284,16 @@ public:
}
template<typename U>
- ALWAYS_INLINE Optional& operator=(Optional<U> const& other) requires(CanBePlacedInOptional<U>)
+ ALWAYS_INLINE Optional& operator=(Optional<U> const& other)
+ requires(CanBePlacedInOptional<U>)
{
m_pointer = other.m_pointer;
return *this;
}
template<typename U>
- ALWAYS_INLINE Optional& operator=(Optional<U>&& other) requires(CanBePlacedInOptional<U>)
+ ALWAYS_INLINE Optional& operator=(Optional<U>&& other)
+ requires(CanBePlacedInOptional<U>)
{
m_pointer = other.m_pointer;
other.m_pointer = nullptr;

@@ -286,7 +302,8 @@ public:
// Note: Disallows assignment from a temporary as this does not do any lifetime extension.
template<typename U>
- ALWAYS_INLINE Optional& operator=(U&& value) requires(CanBePlacedInOptional<U>&& IsLvalueReference<U>)
+ ALWAYS_INLINE Optional& operator=(U&& value)
+ requires(CanBePlacedInOptional<U> && IsLvalueReference<U>)
{
m_pointer = &value;
return *this;
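The AK/Optional.h hunks show the new layout for conditionally deleted special members: the declaration, its requires-clause, and `= delete;` each get their own line. A self-contained sketch of that pattern (a hypothetical `Wrapper` type, not AK code) that compiles under C++20:

```cpp
#include <memory>
#include <type_traits>

// Copy construction is deleted only when T itself is not copyable; otherwise
// the unconstrained defaulted overload is used. This mirrors the formatting
// clang-format 15 produces for the constrained declarations in the diff.
template<typename T>
struct Wrapper {
    T value;

    Wrapper(Wrapper const&)
    requires(!std::is_copy_constructible_v<T>)
    = delete;
    Wrapper(Wrapper const&) = default;
};

static_assert(std::is_copy_constructible_v<Wrapper<int>>);
static_assert(!std::is_copy_constructible_v<Wrapper<std::unique_ptr<int>>>);
```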
@@ -217,7 +217,7 @@ inline ErrorOr<NonnullOwnPtr<T>> try_make(Args&&... args)
template<typename T>
struct Traits<OwnPtr<T>> : public GenericTraits<OwnPtr<T>> {
using PeekType = T*;
- using ConstPeekType = const T*;
+ using ConstPeekType = T const*;
static unsigned hash(OwnPtr<T> const& p) { return ptr_hash(p.ptr()); }
static bool equals(OwnPtr<T> const& a, OwnPtr<T> const& b) { return a.ptr() == b.ptr(); }
};
@@ -475,7 +475,7 @@ struct VaArgNextArgument {
};
#define PRINTF_IMPL_DELEGATE_TO_IMPL(c) \
- case* #c: \
+ case *#c: \
ret += impl.format_##c(state, ap); \
break;
@@ -56,7 +56,7 @@ public:
return value;
}
- const T& head() const
+ T const& head() const
{
VERIFY(!is_empty());
return m_segments.first()->data[m_index_into_first];
@@ -505,7 +505,7 @@ public:
Iterator end() { return {}; }
Iterator begin_from(K key) { return Iterator(static_cast<Node*>(BaseTree::find(this->m_root, key))); }
- using ConstIterator = RedBlackTreeIterator<const RedBlackTree, const V>;
+ using ConstIterator = RedBlackTreeIterator<const RedBlackTree, V const>;
friend ConstIterator;
ConstIterator begin() const { return ConstIterator(static_cast<Node*>(this->m_minimum)); }
ConstIterator end() const { return {}; }
AK/RefPtr.h (36 changed lines)

@@ -66,20 +66,23 @@ public:
}
template<typename U>
- ALWAYS_INLINE RefPtr(NonnullRefPtr<U> const& other) requires(IsConvertible<U*, T*>)
+ ALWAYS_INLINE RefPtr(NonnullRefPtr<U> const& other)
+ requires(IsConvertible<U*, T*>)
: m_ptr(const_cast<T*>(static_cast<T const*>(other.ptr())))
{
m_ptr->ref();
}
template<typename U>
- ALWAYS_INLINE RefPtr(NonnullRefPtr<U>&& other) requires(IsConvertible<U*, T*>)
+ ALWAYS_INLINE RefPtr(NonnullRefPtr<U>&& other)
+ requires(IsConvertible<U*, T*>)
: m_ptr(static_cast<T*>(&other.leak_ref()))
{
}
template<typename U>
- RefPtr(RefPtr<U>&& other) requires(IsConvertible<U*, T*>)
+ RefPtr(RefPtr<U>&& other)
+ requires(IsConvertible<U*, T*>)
: m_ptr(static_cast<T*>(other.leak_ref()))
{
}

@@ -91,7 +94,8 @@ public:
}
template<typename U>
- RefPtr(RefPtr<U> const& other) requires(IsConvertible<U*, T*>)
+ RefPtr(RefPtr<U> const& other)
+ requires(IsConvertible<U*, T*>)
: m_ptr(const_cast<T*>(static_cast<T const*>(other.ptr())))
{
ref_if_not_null(m_ptr);

@@ -116,7 +120,8 @@ public:
}
template<typename U>
- void swap(RefPtr<U>& other) requires(IsConvertible<U*, T*>)
+ void swap(RefPtr<U>& other)
+ requires(IsConvertible<U*, T*>)
{
AK::swap(m_ptr, other.m_ptr);
}

@@ -129,7 +134,8 @@ public:
}
template<typename U>
- ALWAYS_INLINE RefPtr& operator=(RefPtr<U>&& other) requires(IsConvertible<U*, T*>)
+ ALWAYS_INLINE RefPtr& operator=(RefPtr<U>&& other)
+ requires(IsConvertible<U*, T*>)
{
RefPtr tmp { move(other) };
swap(tmp);

@@ -137,7 +143,8 @@ public:
}
template<typename U>
- ALWAYS_INLINE RefPtr& operator=(NonnullRefPtr<U>&& other) requires(IsConvertible<U*, T*>)
+ ALWAYS_INLINE RefPtr& operator=(NonnullRefPtr<U>&& other)
+ requires(IsConvertible<U*, T*>)
{
RefPtr tmp { move(other) };
swap(tmp);

@@ -152,7 +159,8 @@ public:
}
template<typename U>
- ALWAYS_INLINE RefPtr& operator=(NonnullRefPtr<U> const& other) requires(IsConvertible<U*, T*>)
+ ALWAYS_INLINE RefPtr& operator=(NonnullRefPtr<U> const& other)
+ requires(IsConvertible<U*, T*>)
{
RefPtr tmp { other };
swap(tmp);

@@ -167,7 +175,8 @@ public:
}
template<typename U>
- ALWAYS_INLINE RefPtr& operator=(RefPtr<U> const& other) requires(IsConvertible<U*, T*>)
+ ALWAYS_INLINE RefPtr& operator=(RefPtr<U> const& other)
+ requires(IsConvertible<U*, T*>)
{
RefPtr tmp { other };
swap(tmp);

@@ -255,7 +264,11 @@ public:
bool operator==(NonnullRefPtr<U> const& other) const { return as_ptr() == other.m_ptr; }
template<typename RawPtr>
- bool operator==(RawPtr other) const requires(IsPointer<RawPtr>) { return as_ptr() == other; }
+ bool operator==(RawPtr other) const
+ requires(IsPointer<RawPtr>)
+ {
+ return as_ptr() == other;
+ }
ALWAYS_INLINE bool is_null() const { return !m_ptr; }

@@ -303,7 +316,8 @@ inline RefPtr<T> static_ptr_cast(RefPtr<U> const& ptr)
}
template<typename T, typename U>
- inline void swap(RefPtr<T>& a, RefPtr<U>& b) requires(IsConvertible<U*, T*>)
+ inline void swap(RefPtr<T>& a, RefPtr<U>& b)
+ requires(IsConvertible<U*, T*>)
{
a.swap(b);
}
@@ -22,7 +22,7 @@ public:
m_variable = m_saved_value;
}
- void set_override_rollback_value(const T& value)
+ void set_override_rollback_value(T const& value)
{
m_saved_value = value;
}
@@ -71,7 +71,7 @@ private:
: value(move(v))
{
}
- explicit Node(const T& v)
+ explicit Node(T const& v)
: value(v)
{
}

@@ -120,7 +120,7 @@ public:
VERIFY(head());
return head()->value;
}
- const T& first() const
+ T const& first() const
{
VERIFY(head());
return head()->value;

@@ -130,7 +130,7 @@ public:
VERIFY(head());
return tail()->value;
}
- const T& last() const
+ T const& last() const
{
VERIFY(head());
return tail()->value;

@@ -194,7 +194,7 @@ public:
}
#endif
- bool contains_slow(const T& value) const
+ bool contains_slow(T const& value) const
{
return find(value) != end();
}

@@ -204,7 +204,7 @@ public:
Iterator begin() { return Iterator(m_head); }
Iterator end() { return {}; }
- using ConstIterator = SinglyLinkedListIterator<const SinglyLinkedList, const T>;
+ using ConstIterator = SinglyLinkedListIterator<const SinglyLinkedList, T const>;
friend ConstIterator;
ConstIterator begin() const { return ConstIterator(m_head); }
ConstIterator end() const { return ConstIterator(); }

@@ -221,12 +221,12 @@ public:
return AK::find_if(begin(), end(), forward<TUnaryPredicate>(pred));
}
- ConstIterator find(const T& value) const
+ ConstIterator find(T const& value) const
{
return find_if([&](auto& other) { return Traits<T>::equals(value, other); });
}
- Iterator find(const T& value)
+ Iterator find(T const& value)
{
return find_if([&](auto& other) { return Traits<T>::equals(value, other); });
}
@@ -38,7 +38,7 @@ public:
return List::first();
}
- const T& first() const
+ T const& first() const
{
return List::first();
}

@@ -48,7 +48,7 @@ public:
return List::last();
}
- const T& last() const
+ T const& last() const
{
return List::last();
}

@@ -76,7 +76,7 @@ public:
}
#endif
- bool contains_slow(const T& value) const
+ bool contains_slow(T const& value) const
{
return List::contains_slow(value);
}

@@ -103,12 +103,12 @@ public:
return List::find_if(forward<TUnaryPredicate>(pred));
}
- ConstIterator find(const T& value) const
+ ConstIterator find(T const& value) const
{
return List::find(value);
}
- Iterator find(const T& value)
+ Iterator find(T const& value)
{
return List::find(value);
}
@@ -43,7 +43,7 @@ public:
template<size_t size>
requires(IsConst<T>)
- ALWAYS_INLINE constexpr Span(Array<T, size> const& array)
+ ALWAYS_INLINE constexpr Span(Array<T, size> const& array)
: m_values(array.data())
, m_size(size)
{
@@ -16,7 +16,7 @@ public:
Stack() = default;
~Stack() = default;
- bool push(const T& item)
+ bool push(T const& item)
{
if (m_stack.size() >= stack_size)
return false;

@@ -58,7 +58,7 @@ public:
return m_stack.last();
}
- const T& top() const
+ T const& top() const
{
return m_stack.last();
}
@@ -50,7 +50,7 @@ struct __RemoveConst {
using Type = T;
};
template<class T>
- struct __RemoveConst<const T> {
+ struct __RemoveConst<T const> {
using Type = T;
};
template<class T>

@@ -62,7 +62,7 @@ struct __RemoveVolatile {
};
template<class T>
- struct __RemoveVolatile<volatile T> {
+ struct __RemoveVolatile<T volatile> {
using Type = T;
};

@@ -125,9 +125,9 @@ inline constexpr bool IsFunction<Ret(Args...) const volatile&> = true;
template<class Ret, class... Args>
inline constexpr bool IsFunction<Ret(Args..., ...) const volatile&> = true;
template<class Ret, class... Args>
- inline constexpr bool IsFunction<Ret(Args...) &&> = true;
+ inline constexpr bool IsFunction<Ret(Args...)&&> = true;
template<class Ret, class... Args>
- inline constexpr bool IsFunction<Ret(Args..., ...) &&> = true;
+ inline constexpr bool IsFunction<Ret(Args..., ...)&&> = true;
template<class Ret, class... Args>
inline constexpr bool IsFunction<Ret(Args...) const&&> = true;
template<class Ret, class... Args>

@@ -373,7 +373,7 @@ template<class T>
inline constexpr bool IsConst = false;
template<class T>
- inline constexpr bool IsConst<const T> = true;
+ inline constexpr bool IsConst<T const> = true;
template<typename T>
inline constexpr bool IsEnum = __is_enum(T);

@@ -597,7 +597,7 @@ template<typename T>
using Decay = typename __Decay<T>::type;
template<typename T, typename U>
- inline constexpr bool IsPointerOfType = IsPointer<Decay<U>>&& IsSame<T, RemoveCV<RemovePointer<Decay<U>>>>;
+ inline constexpr bool IsPointerOfType = IsPointer<Decay<U>> && IsSame<T, RemoveCV<RemovePointer<Decay<U>>>>;
template<typename T, typename U>
inline constexpr bool IsHashCompatible = false;
@@ -17,13 +17,15 @@
#include <AK/Assertions.h>
template<typename T, typename U>
- constexpr auto round_up_to_power_of_two(T value, U power_of_two) requires(AK::Detail::IsIntegral<T>&& AK::Detail::IsIntegral<U>)
+ constexpr auto round_up_to_power_of_two(T value, U power_of_two)
+ requires(AK::Detail::IsIntegral<T> && AK::Detail::IsIntegral<U>)
{
return ((value - 1) & ~(power_of_two - 1)) + power_of_two;
}
template<typename T>
- constexpr bool is_power_of_two(T value) requires(AK::Detail::IsIntegral<T>)
+ constexpr bool is_power_of_two(T value)
+ requires(AK::Detail::IsIntegral<T>)
{
return value && !((value) & (value - 1));
}

@@ -81,19 +83,19 @@ constexpr SizeType array_size(T (&)[N])
}
template<typename T>
- constexpr T min(const T& a, IdentityType<T> const& b)
+ constexpr T min(T const& a, IdentityType<T> const& b)
{
return b < a ? b : a;
}
template<typename T>
- constexpr T max(const T& a, IdentityType<T> const& b)
+ constexpr T max(T const& a, IdentityType<T> const& b)
{
return a < b ? b : a;
}
template<typename T>
- constexpr T clamp(const T& value, IdentityType<T> const& min, IdentityType<T> const& max)
+ constexpr T clamp(T const& value, IdentityType<T> const& min, IdentityType<T> const& max)
{
VERIFY(max >= min);
if (value > max)

@@ -141,7 +143,8 @@ template<typename T>
using RawPtr = typename Detail::_RawPtr<T>::Type;
template<typename V>
- constexpr decltype(auto) to_underlying(V value) requires(IsEnum<V>)
+ constexpr decltype(auto) to_underlying(V value)
+ requires(IsEnum<V>)
{
return static_cast<UnderlyingType<V>>(value);
}
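The `round_up_to_power_of_two` body touched above relies on the bit trick `((value - 1) & ~(power_of_two - 1)) + power_of_two`. A standalone sketch of the same expression (a hypothetical free function, not the AK one) with a few worked values:

```cpp
#include <cstdint>

// Rounds value up to the next multiple of power_of_two (value must be >= 1
// and power_of_two a power of two; zero would wrap around).
constexpr uint64_t round_up_to_power_of_two(uint64_t value, uint64_t power_of_two)
{
    return ((value - 1) & ~(power_of_two - 1)) + power_of_two;
}

static_assert(round_up_to_power_of_two(5, 4) == 8);  // (4 & ~3) + 4
static_assert(round_up_to_power_of_two(4, 4) == 4);  // already aligned
static_assert(round_up_to_power_of_two(1, 8) == 8);
```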
AK/Stream.h (12 changed lines)

@@ -135,13 +135,15 @@ InputStream& operator>>(InputStream& stream, Optional<T>& value)
}
template<typename Integral>
- InputStream& operator>>(InputStream& stream, Integral& value) requires IsIntegral<Integral>
+ InputStream& operator>>(InputStream& stream, Integral& value)
+ requires IsIntegral<Integral>
{
stream.read_or_error({ &value, sizeof(value) });
return stream;
}
template<typename Integral>
- OutputStream& operator<<(OutputStream& stream, Integral value) requires IsIntegral<Integral>
+ OutputStream& operator<<(OutputStream& stream, Integral value)
+ requires IsIntegral<Integral>
{
stream.write_or_error({ &value, sizeof(value) });
return stream;

@@ -150,13 +152,15 @@ OutputStream& operator<<(OutputStream& stream, Integral value) requires IsIntegr
#ifndef KERNEL
template<typename FloatingPoint>
- InputStream& operator>>(InputStream& stream, FloatingPoint& value) requires IsFloatingPoint<FloatingPoint>
+ InputStream& operator>>(InputStream& stream, FloatingPoint& value)
+ requires IsFloatingPoint<FloatingPoint>
{
stream.read_or_error({ &value, sizeof(value) });
return stream;
}
template<typename FloatingPoint>
- OutputStream& operator<<(OutputStream& stream, FloatingPoint value) requires IsFloatingPoint<FloatingPoint>
+ OutputStream& operator<<(OutputStream& stream, FloatingPoint value)
+ requires IsFloatingPoint<FloatingPoint>
{
stream.write_or_error({ &value, sizeof(value) });
return stream;
@@ -283,7 +283,8 @@ public:
}

template<typename T>
[[nodiscard]] static String number(T value) requires IsArithmetic<T>
[[nodiscard]] static String number(T value)
requires IsArithmetic<T>
{
return formatted("{}", value);
}
21
AK/Time.h
@@ -24,11 +24,10 @@ namespace AK {

// Concept to detect types which look like timespec without requiring the type.
template<typename T>
concept TimeSpecType = requires(T t)
{
t.tv_sec;
t.tv_nsec;
};
concept TimeSpecType = requires(T t) {
t.tv_sec;
t.tv_nsec;
};

constexpr bool is_leap_year(int year)
{
@@ -315,38 +314,38 @@ inline void timespec_to_timeval(TimespecType const& ts, TimevalType& tv)
}

template<TimeSpecType T>
inline bool operator>=(const T& a, const T& b)
inline bool operator>=(T const& a, T const& b)
{
return a.tv_sec > b.tv_sec || (a.tv_sec == b.tv_sec && a.tv_nsec >= b.tv_nsec);
}

template<TimeSpecType T>
inline bool operator>(const T& a, const T& b)
inline bool operator>(T const& a, T const& b)
{
return a.tv_sec > b.tv_sec || (a.tv_sec == b.tv_sec && a.tv_nsec > b.tv_nsec);
}

template<TimeSpecType T>
inline bool operator<(const T& a, const T& b)
inline bool operator<(T const& a, T const& b)
{
return a.tv_sec < b.tv_sec || (a.tv_sec == b.tv_sec && a.tv_nsec < b.tv_nsec);
}

template<TimeSpecType T>
inline bool operator<=(const T& a, const T& b)
inline bool operator<=(T const& a, T const& b)
{
return a.tv_sec < b.tv_sec || (a.tv_sec == b.tv_sec && a.tv_nsec <= b.tv_nsec);
}

template<TimeSpecType T>
inline bool operator==(const T& a, const T& b)
inline bool operator==(T const& a, T const& b)
{
return a.tv_sec == b.tv_sec && a.tv_nsec == b.tv_nsec;
}

template<TimeSpecType T>
inline bool operator!=(const T& a, const T& b)
inline bool operator!=(T const& a, T const& b)
{
return a.tv_sec != b.tv_sec || a.tv_nsec != b.tv_nsec;
}
@@ -20,7 +20,7 @@ struct GenericTraits {
using PeekType = T&;
using ConstPeekType = T const&;
static constexpr bool is_trivial() { return false; }
static constexpr bool equals(const T& a, const T& b) { return a == b; }
static constexpr bool equals(T const& a, T const& b) { return a == b; }
template<Concepts::HashCompatible<T> U>
static bool equals(U const& a, T const& b) { return a == b; }
};
42
AK/Trie.h
@@ -69,10 +69,26 @@ public:
return const_cast<Trie*>(this)->traverse_until_last_accessible_node(it, end);
}

Optional<MetadataType> metadata() const requires(!IsNullPointer<MetadataType>) { return m_metadata; }
void set_metadata(MetadataType metadata) requires(!IsNullPointer<MetadataType>) { m_metadata = move(metadata); }
MetadataType const& metadata_value() const requires(!IsNullPointer<MetadataType>) { return m_metadata.value(); }
MetadataType& metadata_value() requires(!IsNullPointer<MetadataType>) { return m_metadata.value(); }
Optional<MetadataType> metadata() const
requires(!IsNullPointer<MetadataType>)
{
return m_metadata;
}
void set_metadata(MetadataType metadata)
requires(!IsNullPointer<MetadataType>)
{
m_metadata = move(metadata);
}
MetadataType const& metadata_value() const
requires(!IsNullPointer<MetadataType>)
{
return m_metadata.value();
}
MetadataType& metadata_value()
requires(!IsNullPointer<MetadataType>)
{
return m_metadata.value();
}

ValueType const& value() const { return m_value; }
ValueType& value() { return m_value; }
@ -99,7 +115,8 @@ public:
|
|||
|
||||
template<typename It, typename ProvideMetadataFunction>
|
||||
ErrorOr<BaseType*> insert(
|
||||
It& it, It const& end, MetadataType metadata, ProvideMetadataFunction provide_missing_metadata) requires(!IsNullPointer<MetadataType>)
|
||||
It& it, It const& end, MetadataType metadata, ProvideMetadataFunction provide_missing_metadata)
|
||||
requires(!IsNullPointer<MetadataType>)
|
||||
{
|
||||
Trie* last_root_node = &traverse_until_last_accessible_node(it, end);
|
||||
auto invoke_provide_missing_metadata = [&]<typename... Ts>(Ts&&... args) -> ErrorOr<Optional<MetadataType>> {
|
||||
|
@ -119,7 +136,8 @@ public:
|
|||
}
|
||||
|
||||
template<typename It>
|
||||
ErrorOr<BaseType*> insert(It& it, It const& end) requires(IsNullPointer<MetadataType>)
|
||||
ErrorOr<BaseType*> insert(It& it, It const& end)
|
||||
requires(IsNullPointer<MetadataType>)
|
||||
{
|
||||
Trie* last_root_node = &traverse_until_last_accessible_node(it, end);
|
||||
for (; it != end; ++it) {
|
||||
|
@ -133,14 +151,16 @@ public:
|
|||
|
||||
template<typename It, typename ProvideMetadataFunction>
|
||||
ErrorOr<BaseType*> insert(
|
||||
It const& begin, It const& end, MetadataType metadata, ProvideMetadataFunction provide_missing_metadata) requires(!IsNullPointer<MetadataType>)
|
||||
It const& begin, It const& end, MetadataType metadata, ProvideMetadataFunction provide_missing_metadata)
|
||||
requires(!IsNullPointer<MetadataType>)
|
||||
{
|
||||
auto it = begin;
|
||||
return insert(it, end, move(metadata), move(provide_missing_metadata));
|
||||
}
|
||||
|
||||
template<typename It>
|
||||
ErrorOr<BaseType*> insert(It const& begin, It const& end) requires(IsNullPointer<MetadataType>)
|
||||
ErrorOr<BaseType*> insert(It const& begin, It const& end)
|
||||
requires(IsNullPointer<MetadataType>)
|
||||
{
|
||||
auto it = begin;
|
||||
return insert(it, end);
|
||||
|
@ -185,7 +205,8 @@ public:
|
|||
[[nodiscard]] bool is_empty() const { return m_children.is_empty(); }
|
||||
void clear() { m_children.clear(); }
|
||||
|
||||
ErrorOr<BaseType> deep_copy() requires(requires(ValueType value) { { value->try_clone() } -> SpecializationOf<ErrorOr>; })
|
||||
ErrorOr<BaseType> deep_copy()
|
||||
requires(requires(ValueType value) { { value->try_clone() } -> SpecializationOf<ErrorOr>; })
|
||||
{
|
||||
Trie root(TRY(m_value->try_clone()), TRY(copy_metadata(m_metadata)));
|
||||
for (auto& it : m_children)
|
||||
|
@ -260,7 +281,8 @@ public:
|
|||
using DetailTrie = Detail::Trie<BaseT, Trie<ValueType, MetadataT, ValueTraits>, ValueType, MetadataT, ValueTraits>;
|
||||
using MetadataType = typename DetailTrie::MetadataType;
|
||||
|
||||
Trie(ValueType value, MetadataType metadata) requires(!IsVoid<MetadataType> && !IsNullPointer<MetadataType>)
|
||||
Trie(ValueType value, MetadataType metadata)
|
||||
requires(!IsVoid<MetadataType> && !IsNullPointer<MetadataType>)
|
||||
: DetailTrie(move(value), move(metadata))
|
||||
{
|
||||
}
|
||||
|
|
13
AK/Tuple.h
@@ -17,12 +17,13 @@ struct Tuple {

template<typename T>
struct Tuple<T> {
Tuple(T&& value) requires(!IsSame<T&&, const T&>)
Tuple(T&& value)
requires(!IsSame < T &&, T const& >)
: value(forward<T>(value))
{
}

Tuple(const T& value)
Tuple(T const& value)
: value(value)
{
}
@ -35,7 +36,7 @@ struct Tuple<T> {
|
|||
}
|
||||
|
||||
template<typename U>
|
||||
const U& get() const
|
||||
U const& get() const
|
||||
{
|
||||
return const_cast<Tuple<T>&>(*this).get<U>();
|
||||
}
|
||||
|
@ -48,7 +49,7 @@ struct Tuple<T> {
|
|||
}
|
||||
|
||||
template<typename U, unsigned index>
|
||||
const U& get_with_index() const
|
||||
U const& get_with_index() const
|
||||
{
|
||||
return const_cast<Tuple<T>&>(*this).get_with_index<U, index>();
|
||||
}
|
||||
|
@ -83,7 +84,7 @@ struct Tuple<T, TRest...> : Tuple<TRest...> {
|
|||
}
|
||||
|
||||
template<typename U>
|
||||
const U& get() const
|
||||
U const& get() const
|
||||
{
|
||||
return const_cast<Tuple<T, TRest...>&>(*this).get<U>();
|
||||
}
|
||||
|
@ -98,7 +99,7 @@ struct Tuple<T, TRest...> : Tuple<TRest...> {
|
|||
}
|
||||
|
||||
template<typename U, unsigned index>
|
||||
const U& get_with_index() const
|
||||
U const& get_with_index() const
|
||||
{
|
||||
return const_cast<Tuple<T, TRest...>&>(*this).get_with_index<U, index>();
|
||||
}
|
||||
|
|
|
@ -64,7 +64,7 @@ public:
|
|||
{
|
||||
return m_low;
|
||||
}
|
||||
constexpr const T& low() const
|
||||
constexpr T const& low() const
|
||||
{
|
||||
return m_low;
|
||||
}
|
||||
|
@ -72,7 +72,7 @@ public:
|
|||
{
|
||||
return m_high;
|
||||
}
|
||||
constexpr const T& high() const
|
||||
constexpr T const& high() const
|
||||
{
|
||||
return m_high;
|
||||
}
|
||||
|
@ -81,9 +81,9 @@ public:
|
|||
{
|
||||
return Span<u8>(reinterpret_cast<u8*>(this), sizeof(R));
|
||||
}
|
||||
Span<const u8> bytes() const
|
||||
Span<u8 const> bytes() const
|
||||
{
|
||||
return Span<const u8>(reinterpret_cast<u8 const*>(this), sizeof(R));
|
||||
return Span<u8 const>(reinterpret_cast<u8 const*>(this), sizeof(R));
|
||||
}
|
||||
|
||||
template<Unsigned U>
|
||||
|
@ -93,39 +93,45 @@ public:
|
|||
}
|
||||
|
||||
// Utils
|
||||
constexpr size_t clz() const requires(IsSame<T, u64>)
|
||||
constexpr size_t clz() const
|
||||
requires(IsSame<T, u64>)
|
||||
{
|
||||
if (m_high)
|
||||
return count_leading_zeroes(m_high);
|
||||
else
|
||||
return sizeof(T) * 8 + count_leading_zeroes(m_low);
|
||||
}
|
||||
constexpr size_t clz() const requires(!IsSame<T, u64>)
|
||||
constexpr size_t clz() const
|
||||
requires(!IsSame<T, u64>)
|
||||
{
|
||||
if (m_high)
|
||||
return m_high.clz();
|
||||
else
|
||||
return sizeof(T) * 8 + m_low.clz();
|
||||
}
|
||||
constexpr size_t ctz() const requires(IsSame<T, u64>)
|
||||
constexpr size_t ctz() const
|
||||
requires(IsSame<T, u64>)
|
||||
{
|
||||
if (m_low)
|
||||
return count_trailing_zeroes(m_low);
|
||||
else
|
||||
return sizeof(T) * 8 + count_trailing_zeroes(m_high);
|
||||
}
|
||||
constexpr size_t ctz() const requires(!IsSame<T, u64>)
|
||||
constexpr size_t ctz() const
|
||||
requires(!IsSame<T, u64>)
|
||||
{
|
||||
if (m_low)
|
||||
return m_low.ctz();
|
||||
else
|
||||
return sizeof(T) * 8 + m_high.ctz();
|
||||
}
|
||||
constexpr size_t popcnt() const requires(IsSame<T, u64>)
|
||||
constexpr size_t popcnt() const
|
||||
requires(IsSame<T, u64>)
|
||||
{
|
||||
return __builtin_popcntll(m_low) + __builtin_popcntll(m_high);
|
||||
}
|
||||
constexpr size_t popcnt() const requires(!IsSame<T, u64>)
|
||||
constexpr size_t popcnt() const
|
||||
requires(!IsSame<T, u64>)
|
||||
{
|
||||
return m_low.popcnt() + m_high.popcnt();
|
||||
}
|
||||
|
@ -140,59 +146,59 @@ public:
|
|||
return m_low || m_high;
|
||||
}
|
||||
template<Unsigned U>
|
||||
requires(sizeof(T) >= sizeof(U)) constexpr bool operator==(const U& other) const
|
||||
requires(sizeof(T) >= sizeof(U)) constexpr bool operator==(U const& other) const
|
||||
{
|
||||
return !m_high && m_low == other;
|
||||
}
|
||||
template<Unsigned U>
|
||||
requires(sizeof(T) >= sizeof(U)) constexpr bool operator!=(const U& other) const
|
||||
requires(sizeof(T) >= sizeof(U)) constexpr bool operator!=(U const& other) const
|
||||
{
|
||||
return m_high || m_low != other;
|
||||
}
|
||||
template<Unsigned U>
|
||||
requires(sizeof(T) >= sizeof(U)) constexpr bool operator>(const U& other) const
|
||||
requires(sizeof(T) >= sizeof(U)) constexpr bool operator>(U const& other) const
|
||||
{
|
||||
return m_high || m_low > other;
|
||||
}
|
||||
template<Unsigned U>
|
||||
requires(sizeof(T) >= sizeof(U)) constexpr bool operator<(const U& other) const
|
||||
requires(sizeof(T) >= sizeof(U)) constexpr bool operator<(U const& other) const
|
||||
{
|
||||
return !m_high && m_low < other;
|
||||
}
|
||||
template<Unsigned U>
|
||||
requires(sizeof(T) >= sizeof(U)) constexpr bool operator>=(const U& other) const
|
||||
requires(sizeof(T) >= sizeof(U)) constexpr bool operator>=(U const& other) const
|
||||
{
|
||||
return *this == other || *this > other;
|
||||
}
|
||||
template<Unsigned U>
|
||||
requires(sizeof(T) >= sizeof(U)) constexpr bool operator<=(const U& other) const
|
||||
requires(sizeof(T) >= sizeof(U)) constexpr bool operator<=(U const& other) const
|
||||
{
|
||||
return *this == other || *this < other;
|
||||
}
|
||||
|
||||
constexpr bool operator==(const R& other) const
|
||||
constexpr bool operator==(R const& other) const
|
||||
{
|
||||
return m_low == other.low() && m_high == other.high();
|
||||
}
|
||||
constexpr bool operator!=(const R& other) const
|
||||
constexpr bool operator!=(R const& other) const
|
||||
{
|
||||
return m_low != other.low() || m_high != other.high();
|
||||
}
|
||||
constexpr bool operator>(const R& other) const
|
||||
constexpr bool operator>(R const& other) const
|
||||
{
|
||||
return m_high > other.high()
|
||||
|| (m_high == other.high() && m_low > other.low());
|
||||
}
|
||||
constexpr bool operator<(const R& other) const
|
||||
constexpr bool operator<(R const& other) const
|
||||
{
|
||||
return m_high < other.high()
|
||||
|| (m_high == other.high() && m_low < other.low());
|
||||
}
|
||||
constexpr bool operator>=(const R& other) const
|
||||
constexpr bool operator>=(R const& other) const
|
||||
{
|
||||
return *this == other || *this > other;
|
||||
}
|
||||
constexpr bool operator<=(const R& other) const
|
||||
constexpr bool operator<=(R const& other) const
|
||||
{
|
||||
return *this == other || *this < other;
|
||||
}
|
||||
|
@ -203,22 +209,22 @@ public:
|
|||
return { ~m_low, ~m_high };
|
||||
}
|
||||
template<Unsigned U>
|
||||
requires(sizeof(T) >= sizeof(U)) constexpr U operator&(const U& other) const
|
||||
requires(sizeof(T) >= sizeof(U)) constexpr U operator&(U const& other) const
|
||||
{
|
||||
return static_cast<const U>(m_low) & other;
|
||||
return static_cast<U const>(m_low) & other;
|
||||
}
|
||||
template<Unsigned U>
|
||||
requires(sizeof(T) >= sizeof(U)) constexpr R operator|(const U& other) const
|
||||
requires(sizeof(T) >= sizeof(U)) constexpr R operator|(U const& other) const
|
||||
{
|
||||
return { m_low | other, m_high };
|
||||
}
|
||||
template<Unsigned U>
|
||||
requires(sizeof(T) >= sizeof(U)) constexpr R operator^(const U& other) const
|
||||
requires(sizeof(T) >= sizeof(U)) constexpr R operator^(U const& other) const
|
||||
{
|
||||
return { m_low ^ other, m_high };
|
||||
}
|
||||
template<Unsigned U>
|
||||
constexpr R operator<<(const U& shift) const
|
||||
constexpr R operator<<(U const& shift) const
|
||||
{
|
||||
if (shift >= sizeof(R) * 8u)
|
||||
return 0u;
|
||||
|
@ -231,7 +237,7 @@ public:
|
|||
return R { m_low << shift, (m_high << shift) | overflow };
|
||||
}
|
||||
template<Unsigned U>
|
||||
constexpr R operator>>(const U& shift) const
|
||||
constexpr R operator>>(U const& shift) const
|
||||
{
|
||||
if (shift >= sizeof(R) * 8u)
|
||||
return 0u;
|
||||
|
@ -244,75 +250,75 @@ public:
|
|||
return R { (m_low >> shift) | underflow, m_high >> shift };
|
||||
}
|
||||
template<Unsigned U>
|
||||
constexpr R rol(const U& shift) const
|
||||
constexpr R rol(U const& shift) const
|
||||
{
|
||||
return (*this >> sizeof(T) * 8u - shift) | (*this << shift);
|
||||
}
|
||||
template<Unsigned U>
|
||||
constexpr R ror(const U& shift) const
|
||||
constexpr R ror(U const& shift) const
|
||||
{
|
||||
return (*this << sizeof(T) * 8u - shift) | (*this >> shift);
|
||||
}
|
||||
|
||||
constexpr R operator&(const R& other) const
|
||||
constexpr R operator&(R const& other) const
|
||||
{
|
||||
return { m_low & other.low(), m_high & other.high() };
|
||||
}
|
||||
constexpr R operator|(const R& other) const
|
||||
constexpr R operator|(R const& other) const
|
||||
{
|
||||
return { m_low | other.low(), m_high | other.high() };
|
||||
}
|
||||
constexpr R operator^(const R& other) const
|
||||
constexpr R operator^(R const& other) const
|
||||
{
|
||||
return { m_low ^ other.low(), m_high ^ other.high() };
|
||||
}
|
||||
|
||||
// Bitwise assignment
|
||||
template<Unsigned U>
|
||||
requires(sizeof(T) >= sizeof(U)) constexpr R& operator&=(const U& other)
|
||||
requires(sizeof(T) >= sizeof(U)) constexpr R& operator&=(U const& other)
|
||||
{
|
||||
m_high = 0u;
|
||||
m_low &= other;
|
||||
return *this;
|
||||
}
|
||||
template<Unsigned U>
|
||||
requires(sizeof(T) >= sizeof(U)) constexpr R& operator|=(const U& other)
|
||||
requires(sizeof(T) >= sizeof(U)) constexpr R& operator|=(U const& other)
|
||||
{
|
||||
m_low |= other;
|
||||
return *this;
|
||||
}
|
||||
template<Unsigned U>
|
||||
requires(sizeof(T) >= sizeof(U)) constexpr R& operator^=(const U& other)
|
||||
requires(sizeof(T) >= sizeof(U)) constexpr R& operator^=(U const& other)
|
||||
{
|
||||
m_low ^= other;
|
||||
return *this;
|
||||
}
|
||||
template<Unsigned U>
|
||||
constexpr R& operator>>=(const U& other)
|
||||
constexpr R& operator>>=(U const& other)
|
||||
{
|
||||
*this = *this >> other;
|
||||
return *this;
|
||||
}
|
||||
template<Unsigned U>
|
||||
constexpr R& operator<<=(const U& other)
|
||||
constexpr R& operator<<=(U const& other)
|
||||
{
|
||||
*this = *this << other;
|
||||
return *this;
|
||||
}
|
||||
|
||||
constexpr R& operator&=(const R& other)
|
||||
constexpr R& operator&=(R const& other)
|
||||
{
|
||||
m_high &= other.high();
|
||||
m_low &= other.low();
|
||||
return *this;
|
||||
}
|
||||
constexpr R& operator|=(const R& other)
|
||||
constexpr R& operator|=(R const& other)
|
||||
{
|
||||
m_high |= other.high();
|
||||
m_low |= other.low();
|
||||
return *this;
|
||||
}
|
||||
constexpr R& operator^=(const R& other)
|
||||
constexpr R& operator^=(R const& other)
|
||||
{
|
||||
m_high ^= other.high();
|
||||
m_low ^= other.low();
|
||||
|
@ -345,7 +351,7 @@ public:
|
|||
};
|
||||
}
|
||||
template<Unsigned U>
|
||||
requires(my_size() > sizeof(U) && sizeof(T) > sizeof(u64)) constexpr R addc(const U& other, bool& carry) const
|
||||
requires(my_size() > sizeof(U) && sizeof(T) > sizeof(u64)) constexpr R addc(U const& other, bool& carry) const
|
||||
{
|
||||
T lower = m_low.addc(other, carry);
|
||||
T higher = m_high.addc(0u, carry);
|
||||
|
@ -356,7 +362,7 @@ public:
|
|||
};
|
||||
}
|
||||
template<Unsigned U>
|
||||
requires(IsSame<R, U>&& IsSame<T, u64>) constexpr R addc(const U& other, bool& carry) const
|
||||
requires(IsSame<R, U> && IsSame<T, u64>) constexpr R addc(U const& other, bool& carry) const
|
||||
{
|
||||
bool low_carry = Checked<T>::addition_would_overflow(m_low, other.low());
|
||||
bool high_carry = Checked<T>::addition_would_overflow(m_high, other.high());
|
||||
|
@ -376,7 +382,7 @@ public:
|
|||
};
|
||||
}
|
||||
template<Unsigned U>
|
||||
requires(IsSame<R, U> && sizeof(T) > sizeof(u64)) constexpr R addc(const U& other, bool& carry) const
|
||||
requires(IsSame<R, U> && sizeof(T) > sizeof(u64)) constexpr R addc(U const& other, bool& carry) const
|
||||
{
|
||||
T lower = m_low.addc(other.low(), carry);
|
||||
T higher = m_high.addc(other.high(), carry);
|
||||
|
@ -387,14 +393,14 @@ public:
|
|||
};
|
||||
}
|
||||
template<Unsigned U>
|
||||
requires(my_size() < sizeof(U)) constexpr U addc(const U& other, bool& carry) const
|
||||
requires(my_size() < sizeof(U)) constexpr U addc(U const& other, bool& carry) const
|
||||
{
|
||||
return other.addc(*this, carry);
|
||||
}
|
||||
|
||||
// FIXME: subc for sizeof(T) < sizeof(U)
|
||||
template<Unsigned U>
|
||||
requires(sizeof(T) >= sizeof(U)) constexpr R subc(const U& other, bool& carry) const
|
||||
requires(sizeof(T) >= sizeof(U)) constexpr R subc(U const& other, bool& carry) const
|
||||
{
|
||||
bool low_carry = (!m_low && carry) || (m_low - carry) < other;
|
||||
bool high_carry = !m_high && low_carry;
|
||||
|
@ -405,7 +411,7 @@ public:
|
|||
|
||||
return { lower, higher };
|
||||
}
|
||||
constexpr R subc(const R& other, bool& carry) const
|
||||
constexpr R subc(R const& other, bool& carry) const
|
||||
{
|
||||
bool low_carry = (!m_low && carry) || (m_low - carry) < other.low();
|
||||
bool high_carry = (!m_high && low_carry) || (m_high - low_carry) < other.high();
|
||||
|
@ -423,7 +429,7 @@ public:
|
|||
return addc((u8)other, carry);
|
||||
}
|
||||
template<Unsigned U>
|
||||
constexpr R operator+(const U& other) const
|
||||
constexpr R operator+(U const& other) const
|
||||
{
|
||||
bool carry = false; // unused
|
||||
return addc(other, carry);
|
||||
|
@ -436,20 +442,20 @@ public:
|
|||
}
|
||||
|
||||
template<Unsigned U>
|
||||
constexpr R operator-(const U& other) const
|
||||
constexpr R operator-(U const& other) const
|
||||
{
|
||||
bool carry = false; // unused
|
||||
return subc(other, carry);
|
||||
}
|
||||
|
||||
template<Unsigned U>
|
||||
constexpr R& operator+=(const U& other)
|
||||
constexpr R& operator+=(U const& other)
|
||||
{
|
||||
*this = *this + other;
|
||||
return *this;
|
||||
}
|
||||
template<Unsigned U>
|
||||
constexpr R& operator-=(const U& other)
|
||||
constexpr R& operator-=(U const& other)
|
||||
{
|
||||
*this = *this - other;
|
||||
return *this;
|
||||
|
@ -484,7 +490,7 @@ public:
|
|||
|
||||
// FIXME: no restraints on this
|
||||
template<Unsigned U>
|
||||
requires(my_size() >= sizeof(U)) constexpr R div_mod(const U& divisor, U& remainder) const
|
||||
requires(my_size() >= sizeof(U)) constexpr R div_mod(U const& divisor, U& remainder) const
|
||||
{
|
||||
// FIXME: Is there a better way to raise a division by 0?
|
||||
// Maybe as a compiletime warning?
|
||||
|
@ -540,7 +546,7 @@ public:
|
|||
}
|
||||
|
||||
template<Unsigned U>
|
||||
requires(IsSame<R, U>&& IsSame<T, u64>) constexpr UFixedBigIntMultiplicationResult<R> wide_multiply(U const& other) const
|
||||
requires(IsSame<R, U> && IsSame<T, u64>) constexpr UFixedBigIntMultiplicationResult<R> wide_multiply(U const& other) const
|
||||
{
|
||||
auto mult_64_to_128 = [](u64 a, u64 b) -> UFixedBigIntMultiplicationResult<u64> {
|
||||
#ifdef __SIZEOF_INT128__
|
||||
|
@ -606,13 +612,13 @@ public:
|
|||
}
|
||||
|
||||
template<Unsigned U>
|
||||
constexpr R operator/(const U& other) const
|
||||
constexpr R operator/(U const& other) const
|
||||
{
|
||||
U mod { 0u }; // unused
|
||||
return div_mod(other, mod);
|
||||
}
|
||||
template<Unsigned U>
|
||||
constexpr U operator%(const U& other) const
|
||||
constexpr U operator%(U const& other) const
|
||||
{
|
||||
R res { 0u };
|
||||
div_mod(other, res);
|
||||
|
@ -620,19 +626,19 @@ public:
|
|||
}
|
||||
|
||||
template<Unsigned U>
|
||||
constexpr R& operator*=(const U& other)
|
||||
constexpr R& operator*=(U const& other)
|
||||
{
|
||||
*this = *this * other;
|
||||
return *this;
|
||||
}
|
||||
template<Unsigned U>
|
||||
constexpr R& operator/=(const U& other)
|
||||
constexpr R& operator/=(U const& other)
|
||||
{
|
||||
*this = *this / other;
|
||||
return *this;
|
||||
}
|
||||
template<Unsigned U>
|
||||
constexpr R& operator%=(const U& other)
|
||||
constexpr R& operator%=(U const& other)
|
||||
{
|
||||
*this = *this % other;
|
||||
return *this;
|
||||
|
@ -763,11 +769,13 @@ public:
|
|||
return log2() / base.log2();
|
||||
}
|
||||
|
||||
constexpr u64 fold_or() const requires(IsSame<T, u64>)
|
||||
constexpr u64 fold_or() const
|
||||
requires(IsSame<T, u64>)
|
||||
{
|
||||
return m_low | m_high;
|
||||
}
|
||||
constexpr u64 fold_or() const requires(!IsSame<T, u64>)
|
||||
constexpr u64 fold_or() const
|
||||
requires(!IsSame<T, u64>)
|
||||
{
|
||||
return m_low.fold_or() | m_high.fold_or();
|
||||
}
|
||||
|
@ -776,11 +784,13 @@ public:
|
|||
return fold_or() == 0;
|
||||
}
|
||||
|
||||
constexpr u64 fold_xor_pair(R& other) const requires(IsSame<T, u64>)
|
||||
constexpr u64 fold_xor_pair(R& other) const
|
||||
requires(IsSame<T, u64>)
|
||||
{
|
||||
return (m_low ^ other.low()) | (m_high ^ other.high());
|
||||
}
|
||||
constexpr u64 fold_xor_pair(R& other) const requires(!IsSame<T, u64>)
|
||||
constexpr u64 fold_xor_pair(R& other) const
|
||||
requires(!IsSame<T, u64>)
|
||||
{
|
||||
return (m_low.fold_xor_pair(other.low())) | (m_high.fold_xor_pair(other.high()));
|
||||
}
|
||||
|
@ -796,13 +806,25 @@ private:
|
|||
|
||||
// reverse operators
|
||||
template<Unsigned U, Unsigned T>
|
||||
requires(sizeof(U) < sizeof(T) * 2) constexpr bool operator<(const U a, UFixedBigInt<T> const& b) { return b >= a; }
|
||||
requires(sizeof(U) < sizeof(T) * 2) constexpr bool operator<(const U a, UFixedBigInt<T> const& b)
|
||||
{
|
||||
return b >= a;
|
||||
}
|
||||
template<Unsigned U, Unsigned T>
|
||||
requires(sizeof(U) < sizeof(T) * 2) constexpr bool operator>(const U a, UFixedBigInt<T> const& b) { return b <= a; }
|
||||
requires(sizeof(U) < sizeof(T) * 2) constexpr bool operator>(const U a, UFixedBigInt<T> const& b)
|
||||
{
|
||||
return b <= a;
|
||||
}
|
||||
template<Unsigned U, Unsigned T>
|
||||
requires(sizeof(U) < sizeof(T) * 2) constexpr bool operator<=(const U a, UFixedBigInt<T> const& b) { return b > a; }
|
||||
requires(sizeof(U) < sizeof(T) * 2) constexpr bool operator<=(const U a, UFixedBigInt<T> const& b)
|
||||
{
|
||||
return b > a;
|
||||
}
|
||||
template<Unsigned U, Unsigned T>
|
||||
requires(sizeof(U) < sizeof(T) * 2) constexpr bool operator>=(const U a, UFixedBigInt<T> const& b) { return b < a; }
|
||||
requires(sizeof(U) < sizeof(T) * 2) constexpr bool operator>=(const U a, UFixedBigInt<T> const& b)
|
||||
{
|
||||
return b < a;
|
||||
}
|
||||
|
||||
template<Unsigned T>
|
||||
struct Formatter<UFixedBigInt<T>> : StandardFormatter {
|
||||
|
|
|
@ -21,7 +21,8 @@ static constexpr u32 replacement_code_point = 0xfffd;
|
|||
static constexpr u32 first_supplementary_plane_code_point = 0x10000;
|
||||
|
||||
template<typename UtfViewType>
|
||||
static Vector<u16, 1> to_utf16_impl(UtfViewType const& view) requires(IsSame<UtfViewType, Utf8View> || IsSame<UtfViewType, Utf32View>)
|
||||
static Vector<u16, 1> to_utf16_impl(UtfViewType const& view)
|
||||
requires(IsSame<UtfViewType, Utf8View> || IsSame<UtfViewType, Utf32View>)
|
||||
{
|
||||
Vector<u16, 1> utf16_data;
|
||||
utf16_data.ensure_capacity(view.length());
|
||||
|
|
80
AK/Variant.h
|
@ -97,7 +97,8 @@ struct VisitImpl {
|
|||
}
|
||||
|
||||
template<typename Self, typename Visitor, IndexType CurrentIndex = 0>
|
||||
ALWAYS_INLINE static constexpr decltype(auto) visit(Self& self, IndexType id, void const* data, Visitor&& visitor) requires(CurrentIndex < sizeof...(Ts))
|
||||
ALWAYS_INLINE static constexpr decltype(auto) visit(Self& self, IndexType id, void const* data, Visitor&& visitor)
|
||||
requires(CurrentIndex < sizeof...(Ts))
|
||||
{
|
||||
using T = typename TypeList<Ts...>::template Type<CurrentIndex>;
|
||||
|
||||
|
@ -129,13 +130,15 @@ struct VariantConstructTag {
|
|||
|
||||
template<typename T, typename Base>
|
||||
struct VariantConstructors {
|
||||
ALWAYS_INLINE VariantConstructors(T&& t) requires(requires { T(move(t)); })
|
||||
ALWAYS_INLINE VariantConstructors(T&& t)
|
||||
requires(requires { T(move(t)); })
|
||||
{
|
||||
internal_cast().clear_without_destruction();
|
||||
internal_cast().set(move(t), VariantNoClearTag {});
|
||||
}
|
||||
|
||||
ALWAYS_INLINE VariantConstructors(const T& t) requires(requires { T(t); })
|
||||
ALWAYS_INLINE VariantConstructors(T const& t)
|
||||
requires(requires { T(t); })
|
||||
{
|
||||
internal_cast().clear_without_destruction();
|
||||
internal_cast().set(t, VariantNoClearTag {});
|
||||
|
@@ -216,7 +219,8 @@ struct Empty {
};

template<typename T>
concept NotLvalueReference = !IsLvalueReference<T>;
concept NotLvalueReference = !
IsLvalueReference<T>;

template<NotLvalueReference... Ts>
struct Variant
@ -236,13 +240,15 @@ public:
|
|||
}
|
||||
|
||||
template<typename... NewTs>
|
||||
Variant(Variant<NewTs...>&& old) requires((can_contain<NewTs>() && ...))
|
||||
Variant(Variant<NewTs...>&& old)
|
||||
requires((can_contain<NewTs>() && ...))
|
||||
: Variant(move(old).template downcast<Ts...>())
|
||||
{
|
||||
}
|
||||
|
||||
template<typename... NewTs>
|
||||
Variant(Variant<NewTs...> const& old) requires((can_contain<NewTs>() && ...))
|
||||
Variant(Variant<NewTs...> const& old)
|
||||
requires((can_contain<NewTs>() && ...))
|
||||
: Variant(old.template downcast<Ts...>())
|
||||
{
|
||||
}
|
||||
|
@ -250,32 +256,45 @@ public:
|
|||
template<NotLvalueReference... NewTs>
|
||||
friend struct Variant;
|
||||
|
||||
Variant() requires(!can_contain<Empty>()) = delete;
|
||||
Variant() requires(can_contain<Empty>())
|
||||
Variant()
|
||||
requires(!can_contain<Empty>())
|
||||
= delete;
|
||||
Variant()
|
||||
requires(can_contain<Empty>())
|
||||
: Variant(Empty())
|
||||
{
|
||||
}
|
||||
|
||||
#ifdef AK_HAS_CONDITIONALLY_TRIVIAL
|
||||
Variant(Variant const&) requires(!(IsCopyConstructible<Ts> && ...)) = delete;
|
||||
Variant(Variant const&)
|
||||
requires(!(IsCopyConstructible<Ts> && ...))
|
||||
= delete;
|
||||
Variant(Variant const&) = default;
|
||||
|
||||
Variant(Variant&&) requires(!(IsMoveConstructible<Ts> && ...)) = delete;
|
||||
Variant(Variant&&)
|
||||
requires(!(IsMoveConstructible<Ts> && ...))
|
||||
= delete;
|
||||
Variant(Variant&&) = default;
|
||||
|
||||
~Variant() requires(!(IsDestructible<Ts> && ...)) = delete;
|
||||
~Variant()
|
||||
requires(!(IsDestructible<Ts> && ...))
|
||||
= delete;
|
||||
~Variant() = default;
|
||||
|
||||
Variant& operator=(Variant const&) requires(!(IsCopyConstructible<Ts> && ...) || !(IsDestructible<Ts> && ...)) = delete;
|
||||
Variant& operator=(Variant const&)
|
||||
requires(!(IsCopyConstructible<Ts> && ...) || !(IsDestructible<Ts> && ...))
|
||||
= delete;
|
||||
Variant& operator=(Variant const&) = default;
|
||||
|
||||
Variant& operator=(Variant&&) requires(!(IsMoveConstructible<Ts> && ...) || !(IsDestructible<Ts> && ...)) = delete;
|
||||
Variant& operator=(Variant&&)
|
||||
requires(!(IsMoveConstructible<Ts> && ...) || !(IsDestructible<Ts> && ...))
|
||||
= delete;
|
||||
Variant& operator=(Variant&&) = default;
|
||||
#endif
|
||||
|
||||
ALWAYS_INLINE Variant(Variant const& old)
|
||||
#ifdef AK_HAS_CONDITIONALLY_TRIVIAL
|
||||
requires(!(IsTriviallyCopyConstructible<Ts> && ...))
|
||||
requires(!(IsTriviallyCopyConstructible<Ts> && ...))
|
||||
#endif
|
||||
: Detail::MergeAndDeduplicatePacks<Detail::VariantConstructors<Ts, Variant<Ts...>>...>()
|
||||
, m_data {}
|
||||
|
@ -290,7 +309,7 @@ public:
|
|||
// but it will still contain the "moved-from" state of the object it previously contained.
|
||||
ALWAYS_INLINE Variant(Variant&& old)
|
||||
#ifdef AK_HAS_CONDITIONALLY_TRIVIAL
|
||||
requires(!(IsTriviallyMoveConstructible<Ts> && ...))
|
||||
requires(!(IsTriviallyMoveConstructible<Ts> && ...))
|
||||
#endif
|
||||
: Detail::MergeAndDeduplicatePacks<Detail::VariantConstructors<Ts, Variant<Ts...>>...>()
|
||||
, m_index(old.m_index)
|
||||
|
@ -300,7 +319,7 @@ public:
|
|||
|
||||
ALWAYS_INLINE ~Variant()
|
||||
#ifdef AK_HAS_CONDITIONALLY_TRIVIAL
|
||||
requires(!(IsTriviallyDestructible<Ts> && ...))
|
||||
requires(!(IsTriviallyDestructible<Ts> && ...))
|
||||
#endif
|
||||
{
|
||||
Helper::delete_(m_index, m_data);
|
||||
|
@ -308,7 +327,7 @@ public:
|
|||
|
||||
ALWAYS_INLINE Variant& operator=(Variant const& other)
|
||||
#ifdef AK_HAS_CONDITIONALLY_TRIVIAL
|
||||
requires(!(IsTriviallyCopyConstructible<Ts> && ...) || !(IsTriviallyDestructible<Ts> && ...))
|
||||
requires(!(IsTriviallyCopyConstructible<Ts> && ...) || !(IsTriviallyDestructible<Ts> && ...))
|
||||
#endif
|
||||
{
|
||||
if (this != &other) {
|
||||
|
@ -323,7 +342,7 @@ public:
|
|||
|
||||
ALWAYS_INLINE Variant& operator=(Variant&& other)
|
||||
#ifdef AK_HAS_CONDITIONALLY_TRIVIAL
|
||||
requires(!(IsTriviallyMoveConstructible<Ts> && ...) || !(IsTriviallyDestructible<Ts> && ...))
|
||||
requires(!(IsTriviallyMoveConstructible<Ts> && ...) || !(IsTriviallyDestructible<Ts> && ...))
|
||||
#endif
|
||||
{
|
||||
if (this != &other) {
|
||||
|
@ -339,7 +358,8 @@ public:
|
|||
using Detail::MergeAndDeduplicatePacks<Detail::VariantConstructors<Ts, Variant<Ts...>>...>::MergeAndDeduplicatePacks;
|
||||
|
||||
template<typename T, typename StrippedT = RemoveCVReference<T>>
|
||||
void set(T&& t) requires(can_contain<StrippedT>() && requires { StrippedT(forward<T>(t)); })
|
||||
void set(T&& t)
|
||||
requires(can_contain<StrippedT>() && requires { StrippedT(forward<T>(t)); })
|
||||
{
|
||||
constexpr auto new_index = index_of<StrippedT>();
|
||||
Helper::delete_(m_index, m_data);
|
||||
|
@ -348,7 +368,8 @@ public:
|
|||
}
|
||||
|
||||
template<typename T, typename StrippedT = RemoveCVReference<T>>
|
||||
void set(T&& t, Detail::VariantNoClearTag) requires(can_contain<StrippedT>() && requires { StrippedT(forward<T>(t)); })
|
||||
void set(T&& t, Detail::VariantNoClearTag)
|
||||
requires(can_contain<StrippedT>() && requires { StrippedT(forward<T>(t)); })
|
||||
{
|
||||
constexpr auto new_index = index_of<StrippedT>();
|
||||
new (m_data) StrippedT(forward<T>(t));
|
||||
|
@ -356,7 +377,8 @@ public:
|
|||
}
|
||||
|
||||
template<typename T>
|
||||
T* get_pointer() requires(can_contain<T>())
|
||||
T* get_pointer()
|
||||
requires(can_contain<T>())
|
||||
{
|
||||
if (index_of<T>() == m_index)
|
||||
return bit_cast<T*>(&m_data);
|
||||
|
@ -364,29 +386,33 @@ public:
|
|||
}
|
||||
|
||||
template<typename T>
|
||||
T& get() requires(can_contain<T>())
|
||||
T& get()
|
||||
requires(can_contain<T>())
|
||||
{
|
||||
VERIFY(has<T>());
|
||||
return *bit_cast<T*>(&m_data);
|
||||
}
|
||||
|
||||
template<typename T>
|
||||
const T* get_pointer() const requires(can_contain<T>())
|
||||
T const* get_pointer() const
|
||||
requires(can_contain<T>())
|
||||
{
|
||||
if (index_of<T>() == m_index)
|
||||
return bit_cast<const T*>(&m_data);
|
||||
return bit_cast<T const*>(&m_data);
|
||||
return nullptr;
|
||||
}
|
||||
|
||||
template<typename T>
|
||||
const T& get() const requires(can_contain<T>())
|
||||
T const& get() const
|
||||
requires(can_contain<T>())
|
||||
{
|
||||
VERIFY(has<T>());
|
||||
return *bit_cast<const T*>(&m_data);
|
||||
return *bit_cast<T const*>(&m_data);
|
||||
}
|
||||
|
||||
template<typename T>
|
||||
[[nodiscard]] bool has() const requires(can_contain<T>())
|
||||
[[nodiscard]] bool has() const
|
||||
requires(can_contain<T>())
|
||||
{
|
||||
return index_of<T>() == m_index;
|
||||
}
|
||||
|
|
64
AK/Vector.h
@@ -31,13 +31,13 @@ struct CanBePlacedInsideVectorHelper;
template<typename StorageType>
struct CanBePlacedInsideVectorHelper<StorageType, true> {
template<typename U>
static constexpr bool value = requires(U&& u) { StorageType { &u }; };
static constexpr bool value = requires(U && u) { StorageType { &u }; };
};

template<typename StorageType>
struct CanBePlacedInsideVectorHelper<StorageType, false> {
template<typename U>
static constexpr bool value = requires(U&& u) { StorageType(forward<U>(u)); };
static constexpr bool value = requires(U && u) { StorageType(forward<U>(u)); };
};
}
@ -59,7 +59,8 @@ public:
|
|||
{
|
||||
}
|
||||
|
||||
Vector(std::initializer_list<T> list) requires(!IsLvalueReference<T>)
|
||||
Vector(std::initializer_list<T> list)
|
||||
requires(!IsLvalueReference<T>)
|
||||
{
|
||||
ensure_capacity(list.size());
|
||||
for (auto& item : list)
|
||||
|
@ -89,7 +90,8 @@ public:
|
|||
m_size = other.size();
|
||||
}
|
||||
|
||||
explicit Vector(Span<T const> other) requires(!IsLvalueReference<T>)
|
||||
explicit Vector(Span<T const> other)
|
||||
requires(!IsLvalueReference<T>)
|
||||
{
|
||||
ensure_capacity(other.size());
|
||||
TypedTransfer<StorageType>::copy(data(), other.data(), other.size());
|
||||
|
@ -161,7 +163,8 @@ public:
|
|||
VisibleType& last() { return at(size() - 1); }
|
||||
|
||||
template<typename TUnaryPredicate>
|
||||
Optional<VisibleType&> first_matching(TUnaryPredicate const& predicate) requires(!contains_reference)
|
||||
Optional<VisibleType&> first_matching(TUnaryPredicate const& predicate)
|
||||
requires(!contains_reference)
|
||||
{
|
||||
for (size_t i = 0; i < size(); ++i) {
|
||||
if (predicate(at(i))) {
|
||||
|
@ -172,7 +175,8 @@ public:
|
|||
}
|
||||
|
||||
template<typename TUnaryPredicate>
|
||||
Optional<VisibleType const&> first_matching(TUnaryPredicate const& predicate) const requires(!contains_reference)
|
||||
Optional<VisibleType const&> first_matching(TUnaryPredicate const& predicate) const
|
||||
requires(!contains_reference)
|
||||
{
|
||||
for (size_t i = 0; i < size(); ++i) {
|
||||
if (predicate(at(i))) {
|
||||
|
@ -183,7 +187,8 @@ public:
|
|||
}
|
||||
|
||||
template<typename TUnaryPredicate>
|
||||
Optional<VisibleType&> last_matching(TUnaryPredicate const& predicate) requires(!contains_reference)
|
||||
Optional<VisibleType&> last_matching(TUnaryPredicate const& predicate)
|
||||
requires(!contains_reference)
|
||||
{
|
||||
for (ssize_t i = size() - 1; i >= 0; --i) {
|
||||
if (predicate(at(i))) {
|
||||
|
@ -225,13 +230,15 @@ public:
|
|||
#ifndef KERNEL
|
||||
|
||||
template<typename U = T>
|
||||
void insert(size_t index, U&& value) requires(CanBePlacedInsideVector<U>)
|
||||
void insert(size_t index, U&& value)
|
||||
requires(CanBePlacedInsideVector<U>)
|
||||
{
|
||||
MUST(try_insert<U>(index, forward<U>(value)));
|
||||
}
|
||||
|
||||
template<typename TUnaryPredicate, typename U = T>
|
||||
void insert_before_matching(U&& value, TUnaryPredicate const& predicate, size_t first_index = 0, size_t* inserted_index = nullptr) requires(CanBePlacedInsideVector<U>)
|
||||
void insert_before_matching(U&& value, TUnaryPredicate const& predicate, size_t first_index = 0, size_t* inserted_index = nullptr)
|
||||
requires(CanBePlacedInsideVector<U>)
|
||||
{
|
||||
MUST(try_insert_before_matching(forward<U>(value), predicate, first_index, inserted_index));
|
||||
}
|
||||
|
@ -256,7 +263,8 @@ public:
|
|||
MUST(try_append(move(value)));
|
||||
}
|
||||
|
||||
ALWAYS_INLINE void append(T const& value) requires(!contains_reference)
|
||||
ALWAYS_INLINE void append(T const& value)
|
||||
requires(!contains_reference)
|
||||
{
|
||||
MUST(try_append(T(value)));
|
||||
}
|
||||
|
@ -269,7 +277,8 @@ public:
|
|||
#endif
|
||||
|
||||
template<typename U = T>
|
||||
ALWAYS_INLINE void unchecked_append(U&& value) requires(CanBePlacedInsideVector<U>)
|
||||
ALWAYS_INLINE void unchecked_append(U&& value)
|
||||
requires(CanBePlacedInsideVector<U>)
|
||||
{
|
||||
VERIFY((size() + 1) <= capacity());
|
||||
if constexpr (contains_reference)
|
||||
|
@ -290,13 +299,15 @@ public:
|
|||
|
||||
#ifndef KERNEL
|
||||
template<class... Args>
|
||||
void empend(Args&&... args) requires(!contains_reference)
|
||||
void empend(Args&&... args)
|
||||
requires(!contains_reference)
|
||||
{
|
||||
MUST(try_empend(forward<Args>(args)...));
|
||||
}
|
||||
|
||||
template<typename U = T>
|
||||
void prepend(U&& value) requires(CanBePlacedInsideVector<U>)
|
||||
void prepend(U&& value)
|
||||
requires(CanBePlacedInsideVector<U>)
|
||||
{
|
||||
MUST(try_insert(0, forward<U>(value)));
|
||||
}
|
||||
|
@ -481,7 +492,8 @@ public:
|
|||
}
|
||||
|
||||
template<typename U = T>
|
||||
ErrorOr<void> try_insert(size_t index, U&& value) requires(CanBePlacedInsideVector<U>)
|
||||
ErrorOr<void> try_insert(size_t index, U&& value)
|
||||
requires(CanBePlacedInsideVector<U>)
|
||||
{
|
||||
if (index > size())
|
||||
return Error::from_errno(EINVAL);
|
||||
|
@ -505,7 +517,8 @@ public:
|
|||
}
|
||||
|
||||
template<typename TUnaryPredicate, typename U = T>
|
||||
ErrorOr<void> try_insert_before_matching(U&& value, TUnaryPredicate const& predicate, size_t first_index = 0, size_t* inserted_index = nullptr) requires(CanBePlacedInsideVector<U>)
|
||||
ErrorOr<void> try_insert_before_matching(U&& value, TUnaryPredicate const& predicate, size_t first_index = 0, size_t* inserted_index = nullptr)
|
||||
requires(CanBePlacedInsideVector<U>)
|
||||
{
|
||||
for (size_t i = first_index; i < size(); ++i) {
|
||||
if (predicate(at(i))) {
|
||||
|
@ -554,7 +567,8 @@ public:
|
|||
return {};
|
||||
}
|
||||
|
||||
ErrorOr<void> try_append(T const& value) requires(!contains_reference)
|
||||
ErrorOr<void> try_append(T const& value)
|
||||
requires(!contains_reference)
|
||||
{
|
||||
return try_append(T(value));
|
||||
}
|
||||
|
@ -570,7 +584,8 @@ public:
|
|||
}
|
||||
|
||||
template<class... Args>
|
||||
ErrorOr<void> try_empend(Args&&... args) requires(!contains_reference)
|
||||
ErrorOr<void> try_empend(Args&&... args)
|
||||
requires(!contains_reference)
|
||||
{
|
||||
TRY(try_grow_capacity(m_size + 1));
|
||||
new (slot(m_size)) StorageType { forward<Args>(args)... };
|
||||
|
@ -579,7 +594,8 @@ public:
|
|||
}
|
||||
|
||||
template<typename U = T>
|
||||
ErrorOr<void> try_prepend(U&& value) requires(CanBePlacedInsideVector<U>)
|
||||
ErrorOr<void> try_prepend(U&& value)
|
||||
requires(CanBePlacedInsideVector<U>)
|
||||
{
|
||||
return try_insert(0, forward<U>(value));
|
||||
}
|
||||
|
@ -650,7 +666,8 @@ public:
|
|||
return {};
|
||||
}
|
||||
|
||||
ErrorOr<void> try_resize(size_t new_size, bool keep_capacity = false) requires(!contains_reference)
|
||||
ErrorOr<void> try_resize(size_t new_size, bool keep_capacity = false)
|
||||
requires(!contains_reference)
|
||||
{
|
||||
if (new_size <= size()) {
|
||||
shrink(new_size, keep_capacity);
|
||||
|
@ -665,7 +682,8 @@ public:
|
|||
return {};
|
||||
}
|
||||
|
||||
ErrorOr<void> try_resize_and_keep_capacity(size_t new_size) requires(!contains_reference)
|
||||
ErrorOr<void> try_resize_and_keep_capacity(size_t new_size)
|
||||
requires(!contains_reference)
|
||||
{
|
||||
return try_resize(new_size, true);
|
||||
}
|
||||
|
@ -699,12 +717,14 @@ public:
|
|||
m_size = new_size;
|
||||
}
|
||||
|
||||
void resize(size_t new_size, bool keep_capacity = false) requires(!contains_reference)
|
||||
void resize(size_t new_size, bool keep_capacity = false)
|
||||
requires(!contains_reference)
|
||||
{
|
||||
MUST(try_resize(new_size, keep_capacity));
|
||||
}
|
||||
|
||||
void resize_and_keep_capacity(size_t new_size) requires(!contains_reference)
|
||||
void resize_and_keep_capacity(size_t new_size)
|
||||
requires(!contains_reference)
|
||||
{
|
||||
MUST(try_resize_and_keep_capacity(new_size));
|
||||
}
|
||||
|
|
40
AK/WeakPtr.h
|
@ -19,26 +19,30 @@ public:
|
|||
WeakPtr() = default;
|
||||
|
||||
template<typename U>
|
||||
WeakPtr(WeakPtr<U> const& other) requires(IsBaseOf<T, U>)
|
||||
WeakPtr(WeakPtr<U> const& other)
|
||||
requires(IsBaseOf<T, U>)
|
||||
: m_link(other.m_link)
|
||||
{
|
||||
}
|
||||
|
||||
template<typename U>
|
||||
WeakPtr(WeakPtr<U>&& other) requires(IsBaseOf<T, U>)
|
||||
WeakPtr(WeakPtr<U>&& other)
|
||||
requires(IsBaseOf<T, U>)
|
||||
: m_link(other.take_link())
|
||||
{
|
||||
}
|
||||
|
||||
template<typename U>
|
||||
WeakPtr& operator=(WeakPtr<U>&& other) requires(IsBaseOf<T, U>)
|
||||
WeakPtr& operator=(WeakPtr<U>&& other)
|
||||
requires(IsBaseOf<T, U>)
|
||||
{
|
||||
m_link = other.take_link();
|
||||
return *this;
|
||||
}
|
||||
|
||||
template<typename U>
|
||||
WeakPtr& operator=(WeakPtr<U> const& other) requires(IsBaseOf<T, U>)
|
||||
WeakPtr& operator=(WeakPtr<U> const& other)
|
||||
requires(IsBaseOf<T, U>)
|
||||
{
|
||||
if ((void const*)this != (void const*)&other)
|
||||
m_link = other.m_link;
|
||||
|
@ -52,40 +56,46 @@ public:
|
|||
}
|
||||
|
||||
template<typename U>
|
||||
WeakPtr(const U& object) requires(IsBaseOf<T, U>)
|
||||
WeakPtr(U const& object)
|
||||
requires(IsBaseOf<T, U>)
|
||||
: m_link(object.template make_weak_ptr<U>().take_link())
|
||||
{
|
||||
}
|
||||
|
||||
template<typename U>
|
||||
WeakPtr(const U* object) requires(IsBaseOf<T, U>)
|
||||
WeakPtr(U const* object)
|
||||
requires(IsBaseOf<T, U>)
|
||||
{
|
||||
if (object)
|
||||
m_link = object->template make_weak_ptr<U>().take_link();
|
||||
}
|
||||
|
||||
template<typename U>
|
||||
WeakPtr(RefPtr<U> const& object) requires(IsBaseOf<T, U>)
|
||||
WeakPtr(RefPtr<U> const& object)
|
||||
requires(IsBaseOf<T, U>)
|
||||
{
|
||||
if (object)
|
||||
m_link = object->template make_weak_ptr<U>().take_link();
|
||||
}
|
||||
|
||||
template<typename U>
|
||||
WeakPtr(NonnullRefPtr<U> const& object) requires(IsBaseOf<T, U>)
|
||||
WeakPtr(NonnullRefPtr<U> const& object)
|
||||
requires(IsBaseOf<T, U>)
|
||||
{
|
||||
m_link = object->template make_weak_ptr<U>().take_link();
|
||||
}
|
||||
|
||||
template<typename U>
|
||||
WeakPtr& operator=(const U& object) requires(IsBaseOf<T, U>)
|
||||
WeakPtr& operator=(U const& object)
|
||||
requires(IsBaseOf<T, U>)
|
||||
{
|
||||
m_link = object.template make_weak_ptr<U>().take_link();
|
||||
return *this;
|
||||
}
|
||||
|
||||
template<typename U>
|
||||
WeakPtr& operator=(const U* object) requires(IsBaseOf<T, U>)
|
||||
WeakPtr& operator=(U const* object)
|
||||
requires(IsBaseOf<T, U>)
|
||||
{
|
||||
if (object)
|
||||
m_link = object->template make_weak_ptr<U>().take_link();
|
||||
|
@ -95,7 +105,8 @@ public:
|
|||
}
|
||||
|
||||
template<typename U>
|
||||
WeakPtr& operator=(RefPtr<U> const& object) requires(IsBaseOf<T, U>)
|
||||
WeakPtr& operator=(RefPtr<U> const& object)
|
||||
requires(IsBaseOf<T, U>)
|
||||
{
|
||||
if (object)
|
||||
m_link = object->template make_weak_ptr<U>().take_link();
|
||||
|
@ -105,7 +116,8 @@ public:
|
|||
}
|
||||
|
||||
template<typename U>
|
||||
WeakPtr& operator=(NonnullRefPtr<U> const& object) requires(IsBaseOf<T, U>)
|
||||
WeakPtr& operator=(NonnullRefPtr<U> const& object)
|
||||
requires(IsBaseOf<T, U>)
|
||||
{
|
||||
m_link = object->template make_weak_ptr<U>().take_link();
|
||||
return *this;
|
||||
|
@ -154,10 +166,10 @@ inline ErrorOr<WeakPtr<U>> Weakable<T>::try_make_weak_ptr() const
|
|||
}
|
||||
|
||||
template<typename T>
|
||||
struct Formatter<WeakPtr<T>> : Formatter<const T*> {
|
||||
struct Formatter<WeakPtr<T>> : Formatter<T const*> {
|
||||
ErrorOr<void> format(FormatBuilder& builder, WeakPtr<T> const& value)
|
||||
{
|
||||
return Formatter<const T*>::format(builder, value.ptr());
|
||||
return Formatter<T const*>::format(builder, value.ptr());
|
||||
}
|
||||
};
|
||||
|
||||
|
|
|
@@ -27,7 +27,7 @@ class WeakLink : public RefCounted<WeakLink> {
public:
template<typename T>
RefPtr<T> strong_ref() const
requires(IsBaseOf<RefCountedBase, T>)
requires(IsBaseOf<RefCountedBase, T>)
{
return static_cast<T*>(m_ptr);
}
@@ -54,7 +54,8 @@ public:
ConsoleDevice& console_device();

template<typename DeviceType, typename... Args>
static inline ErrorOr<NonnullLockRefPtr<DeviceType>> try_create_device(Args&&... args) requires(requires(Args... args) { DeviceType::try_create(args...); })
static inline ErrorOr<NonnullLockRefPtr<DeviceType>> try_create_device(Args&&... args)
requires(requires(Args... args) { DeviceType::try_create(args...); })
{
auto device = TRY(DeviceType::try_create(forward<Args>(args)...));
device->after_inserting();
@@ -76,7 +76,7 @@ UNMAP_AFTER_INIT ErrorOr<void> VMWareGraphicsAdapter::initialize_fifo_registers(
return Error::from_errno(ENOTSUP);
}

m_fifo_registers = TRY(Memory::map_typed<volatile VMWareDisplayFIFORegisters>(fifo_physical_address, fifo_size, Memory::Region::Access::ReadWrite));
m_fifo_registers = TRY(Memory::map_typed<VMWareDisplayFIFORegisters volatile>(fifo_physical_address, fifo_size, Memory::Region::Access::ReadWrite));
m_fifo_registers->start = 16;
m_fifo_registers->size = 16 + (10 * 1024);
m_fifo_registers->next_command = 16;
@ -48,7 +48,7 @@ private:
|
|||
|
||||
VMWareGraphicsAdapter(PCI::DeviceIdentifier const&, NonnullOwnPtr<IOWindow> registers_io_window);
|
||||
|
||||
Memory::TypedMapping<volatile VMWareDisplayFIFORegisters> m_fifo_registers;
|
||||
Memory::TypedMapping<VMWareDisplayFIFORegisters volatile> m_fifo_registers;
|
||||
LockRefPtr<VMWareDisplayConnector> m_display_connector;
|
||||
mutable NonnullOwnPtr<IOWindow> m_registers_io_window;
|
||||
mutable Spinlock m_io_access_lock { LockRank::None };
|
||||
|
|
|
@@ -13,17 +13,23 @@
#define KMALLOC_SCRUB_BYTE 0xbb
#define KFREE_SCRUB_BYTE 0xaa

#define MAKE_ALIGNED_ALLOCATED(type, alignment) \
public: \
[[nodiscard]] void* operator new(size_t) \
{ \
void* ptr = kmalloc_aligned(sizeof(type), alignment); \
VERIFY(ptr); \
return ptr; \
} \
[[nodiscard]] void* operator new(size_t, std::nothrow_t const&) noexcept { return kmalloc_aligned(sizeof(type), alignment); } \
void operator delete(void* ptr) noexcept { kfree_aligned(ptr); } \
\
#define MAKE_ALIGNED_ALLOCATED(type, alignment) \
public: \
[[nodiscard]] void* operator new(size_t) \
{ \
void* ptr = kmalloc_aligned(sizeof(type), alignment); \
VERIFY(ptr); \
return ptr; \
} \
[[nodiscard]] void* operator new(size_t, std::nothrow_t const&) noexcept \
{ \
return kmalloc_aligned(sizeof(type), alignment); \
} \
void operator delete(void* ptr) noexcept \
{ \
kfree_aligned(ptr); \
} \
\
private:

// The C++ standard specifies that the nothrow allocation tag should live in the std namespace.
@ -53,7 +53,7 @@ ErrorOr<NonnullOwnPtr<IOWindow>> IOWindow::create_from_io_window_with_offset(u64
|
|||
return Error::from_errno(EOVERFLOW);
|
||||
#endif
|
||||
|
||||
auto memory_mapped_range = TRY(Memory::adopt_new_nonnull_own_typed_mapping<volatile u8>(m_memory_mapped_range->paddr.offset(offset), space_length, Memory::Region::Access::ReadWrite));
|
||||
auto memory_mapped_range = TRY(Memory::adopt_new_nonnull_own_typed_mapping<u8 volatile>(m_memory_mapped_range->paddr.offset(offset), space_length, Memory::Region::Access::ReadWrite));
|
||||
return TRY(adopt_nonnull_own_or_enomem(new (nothrow) IOWindow(move(memory_mapped_range))));
|
||||
}
|
||||
|
||||
|
@ -110,7 +110,7 @@ ErrorOr<NonnullOwnPtr<IOWindow>> IOWindow::create_for_pci_device_bar(PCI::Addres
|
|||
return Error::from_errno(EOVERFLOW);
|
||||
if (pci_bar_space_type == PCI::BARSpaceType::Memory64BitSpace && Checked<u64>::addition_would_overflow(pci_bar_value, space_length))
|
||||
return Error::from_errno(EOVERFLOW);
|
||||
auto memory_mapped_range = TRY(Memory::adopt_new_nonnull_own_typed_mapping<volatile u8>(PhysicalAddress(pci_bar_value & 0xfffffff0), space_length, Memory::Region::Access::ReadWrite));
|
||||
auto memory_mapped_range = TRY(Memory::adopt_new_nonnull_own_typed_mapping<u8 volatile>(PhysicalAddress(pci_bar_value & 0xfffffff0), space_length, Memory::Region::Access::ReadWrite));
|
||||
return TRY(adopt_nonnull_own_or_enomem(new (nothrow) IOWindow(move(memory_mapped_range))));
|
||||
}
|
||||
|
||||
|
@ -131,7 +131,7 @@ ErrorOr<NonnullOwnPtr<IOWindow>> IOWindow::create_for_pci_device_bar(PCI::Device
|
|||
return create_for_pci_device_bar(pci_device_identifier.address(), pci_bar, space_length);
|
||||
}
|
||||
|
||||
IOWindow::IOWindow(NonnullOwnPtr<Memory::TypedMapping<volatile u8>> memory_mapped_range)
|
||||
IOWindow::IOWindow(NonnullOwnPtr<Memory::TypedMapping<u8 volatile>> memory_mapped_range)
|
||||
: m_space_type(SpaceType::Memory)
|
||||
, m_memory_mapped_range(move(memory_mapped_range))
|
||||
{
|
||||
|
|
|
@ -75,7 +75,7 @@ public:
|
|||
#endif
|
||||
|
||||
private:
|
||||
explicit IOWindow(NonnullOwnPtr<Memory::TypedMapping<volatile u8>>);
|
||||
explicit IOWindow(NonnullOwnPtr<Memory::TypedMapping<u8 volatile>>);
|
||||
|
||||
u8 volatile* as_memory_address_pointer();
|
||||
|
||||
|
@ -116,7 +116,7 @@ private:
|
|||
// can cause problems with strict bare metal hardware. For example, some XHCI USB controllers
|
||||
// might completely lock up because of an unaligned memory access to their registers.
|
||||
VERIFY((start_offset % sizeof(T)) == 0);
|
||||
data = *(volatile T*)(as_memory_address_pointer() + start_offset);
|
||||
data = *(T volatile*)(as_memory_address_pointer() + start_offset);
|
||||
}
|
||||
|
||||
template<typename T>
|
||||
|
@ -135,12 +135,12 @@ private:
|
|||
// can cause problems with strict bare metal hardware. For example, some XHCI USB controllers
|
||||
// might completely lock up because of an unaligned memory access to their registers.
|
||||
VERIFY((start_offset % sizeof(T)) == 0);
|
||||
*(volatile T*)(as_memory_address_pointer() + start_offset) = value;
|
||||
*(T volatile*)(as_memory_address_pointer() + start_offset) = value;
|
||||
}
|
||||
|
||||
SpaceType m_space_type { SpaceType::Memory };
|
||||
|
||||
OwnPtr<Memory::TypedMapping<volatile u8>> m_memory_mapped_range;
|
||||
OwnPtr<Memory::TypedMapping<u8 volatile>> m_memory_mapped_range;
|
||||
|
||||
#if ARCH(I386) || ARCH(X86_64)
|
||||
OwnPtr<IOAddressData> m_io_range;
|
||||
|
|
|
@ -129,12 +129,12 @@ public:
|
|||
};
|
||||
|
||||
LockRefPtr() = default;
|
||||
LockRefPtr(const T* ptr)
|
||||
LockRefPtr(T const* ptr)
|
||||
: m_bits(PtrTraits::as_bits(const_cast<T*>(ptr)))
|
||||
{
|
||||
ref_if_not_null(const_cast<T*>(ptr));
|
||||
}
|
||||
LockRefPtr(const T& object)
|
||||
LockRefPtr(T const& object)
|
||||
: m_bits(PtrTraits::as_bits(const_cast<T*>(&object)))
|
||||
{
|
||||
T* ptr = const_cast<T*>(&object);
|
||||
|
@ -156,18 +156,21 @@ public:
|
|||
{
|
||||
}
|
||||
template<typename U>
|
||||
ALWAYS_INLINE LockRefPtr(NonnullLockRefPtr<U> const& other) requires(IsConvertible<U*, T*>)
|
||||
ALWAYS_INLINE LockRefPtr(NonnullLockRefPtr<U> const& other)
|
||||
requires(IsConvertible<U*, T*>)
|
||||
: m_bits(PtrTraits::as_bits(const_cast<U*>(other.add_ref())))
|
||||
{
|
||||
}
|
||||
template<typename U>
|
||||
ALWAYS_INLINE LockRefPtr(NonnullLockRefPtr<U>&& other) requires(IsConvertible<U*, T*>)
|
||||
ALWAYS_INLINE LockRefPtr(NonnullLockRefPtr<U>&& other)
|
||||
requires(IsConvertible<U*, T*>)
|
||||
: m_bits(PtrTraits::as_bits(&other.leak_ref()))
|
||||
{
|
||||
VERIFY(!is_null());
|
||||
}
|
||||
template<typename U, typename P = LockRefPtrTraits<U>>
|
||||
LockRefPtr(LockRefPtr<U, P>&& other) requires(IsConvertible<U*, T*>)
|
||||
LockRefPtr(LockRefPtr<U, P>&& other)
|
||||
requires(IsConvertible<U*, T*>)
|
||||
: m_bits(PtrTraits::template convert_from<U, P>(other.leak_ref_raw()))
|
||||
{
|
||||
}
|
||||
|
@@ -176,7 +179,8 @@ public:
{
}
template<typename U, typename P = LockRefPtrTraits<U>>
LockRefPtr(LockRefPtr<U, P> const& other) requires(IsConvertible<U*, T*>)
LockRefPtr(LockRefPtr<U, P> const& other)
requires(IsConvertible<U*, T*>)
: m_bits(other.add_ref_raw())
{
}

@@ -205,7 +209,8 @@ public:
}

template<typename U, typename P = LockRefPtrTraits<U>>
void swap(LockRefPtr<U, P>& other) requires(IsConvertible<U*, T*>)
void swap(LockRefPtr<U, P>& other)
requires(IsConvertible<U*, T*>)
{
// NOTE: swap is not atomic!
FlatPtr other_bits = P::exchange(other.m_bits, P::default_null_value);

@@ -221,14 +226,16 @@ public:
}

template<typename U, typename P = LockRefPtrTraits<U>>
ALWAYS_INLINE LockRefPtr& operator=(LockRefPtr<U, P>&& other) requires(IsConvertible<U*, T*>)
ALWAYS_INLINE LockRefPtr& operator=(LockRefPtr<U, P>&& other)
requires(IsConvertible<U*, T*>)
{
assign_raw(PtrTraits::template convert_from<U, P>(other.leak_ref_raw()));
return *this;
}

template<typename U>
ALWAYS_INLINE LockRefPtr& operator=(NonnullLockRefPtr<U>&& other) requires(IsConvertible<U*, T*>)
ALWAYS_INLINE LockRefPtr& operator=(NonnullLockRefPtr<U>&& other)
requires(IsConvertible<U*, T*>)
{
assign_raw(PtrTraits::as_bits(&other.leak_ref()));
return *this;

@@ -241,7 +248,8 @@ public:
}

template<typename U>
ALWAYS_INLINE LockRefPtr& operator=(NonnullLockRefPtr<U> const& other) requires(IsConvertible<U*, T*>)
ALWAYS_INLINE LockRefPtr& operator=(NonnullLockRefPtr<U> const& other)
requires(IsConvertible<U*, T*>)
{
assign_raw(PtrTraits::as_bits(other.add_ref()));
return *this;
@@ -255,20 +263,21 @@ public:
}

template<typename U>
ALWAYS_INLINE LockRefPtr& operator=(LockRefPtr<U> const& other) requires(IsConvertible<U*, T*>)
ALWAYS_INLINE LockRefPtr& operator=(LockRefPtr<U> const& other)
requires(IsConvertible<U*, T*>)
{
assign_raw(other.add_ref_raw());
return *this;
}

ALWAYS_INLINE LockRefPtr& operator=(const T* ptr)
ALWAYS_INLINE LockRefPtr& operator=(T const* ptr)
{
ref_if_not_null(const_cast<T*>(ptr));
assign_raw(PtrTraits::as_bits(const_cast<T*>(ptr)));
return *this;
}

ALWAYS_INLINE LockRefPtr& operator=(const T& object)
ALWAYS_INLINE LockRefPtr& operator=(T const& object)
{
const_cast<T&>(object).ref();
assign_raw(PtrTraits::as_bits(const_cast<T*>(&object)));

@@ -317,14 +326,14 @@ public:
}

ALWAYS_INLINE T* ptr() { return as_ptr(); }
ALWAYS_INLINE const T* ptr() const { return as_ptr(); }
ALWAYS_INLINE T const* ptr() const { return as_ptr(); }

ALWAYS_INLINE T* operator->()
{
return as_nonnull_ptr();
}

ALWAYS_INLINE const T* operator->() const
ALWAYS_INLINE T const* operator->() const
{
return as_nonnull_ptr();
}

@@ -334,12 +343,12 @@ public:
return *as_nonnull_ptr();
}

ALWAYS_INLINE const T& operator*() const
ALWAYS_INLINE T const& operator*() const
{
return *as_nonnull_ptr();
}

ALWAYS_INLINE operator const T*() const { return as_ptr(); }
ALWAYS_INLINE operator T const*() const { return as_ptr(); }
ALWAYS_INLINE operator T*() { return as_ptr(); }

ALWAYS_INLINE operator bool() { return !is_null(); }
@@ -353,8 +362,8 @@ public:
bool operator==(LockRefPtr& other) { return as_ptr() == other.as_ptr(); }
bool operator!=(LockRefPtr& other) { return as_ptr() != other.as_ptr(); }

bool operator==(const T* other) const { return as_ptr() == other; }
bool operator!=(const T* other) const { return as_ptr() != other; }
bool operator==(T const* other) const { return as_ptr() == other; }
bool operator!=(T const* other) const { return as_ptr() != other; }

bool operator==(T* other) { return as_ptr() == other; }
bool operator!=(T* other) { return as_ptr() != other; }

@@ -363,7 +372,7 @@ public:

template<typename U = T>
typename PtrTraits::NullType null_value() const
requires(IsSame<U, T> && !IsNullPointer<typename PtrTraits::NullType>)
requires(IsSame<U, T> && !IsNullPointer<typename PtrTraits::NullType>)
{
// make sure we are holding a null value
FlatPtr bits = m_bits.load(AK::MemoryOrder::memory_order_relaxed);

@@ -371,7 +380,8 @@ public:
return PtrTraits::to_null_value(bits);
}
template<typename U = T>
void set_null_value(typename PtrTraits::NullType value) requires(IsSame<U, T> && !IsNullPointer<typename PtrTraits::NullType>)
void set_null_value(typename PtrTraits::NullType value)
requires(IsSame<U, T> && !IsNullPointer<typename PtrTraits::NullType>)
{
// make sure that new null value would be interpreted as a null value
FlatPtr bits = PtrTraits::from_null_value(value);
@@ -445,17 +455,17 @@ private:
};

template<typename T>
struct Formatter<LockRefPtr<T>> : Formatter<const T*> {
struct Formatter<LockRefPtr<T>> : Formatter<T const*> {
ErrorOr<void> format(FormatBuilder& builder, LockRefPtr<T> const& value)
{
return Formatter<const T*>::format(builder, value.ptr());
return Formatter<T const*>::format(builder, value.ptr());
}
};

template<typename T>
struct Traits<LockRefPtr<T>> : public GenericTraits<LockRefPtr<T>> {
using PeekType = T*;
using ConstPeekType = const T*;
using ConstPeekType = T const*;
static unsigned hash(LockRefPtr<T> const& p) { return ptr_hash(p.ptr()); }
static bool equals(LockRefPtr<T> const& a, LockRefPtr<T> const& b) { return a.ptr() == b.ptr(); }
};

@@ -463,17 +473,18 @@ struct Traits<LockRefPtr<T>> : public GenericTraits<LockRefPtr<T>> {
template<typename T, typename U>
inline NonnullLockRefPtr<T> static_ptr_cast(NonnullLockRefPtr<U> const& ptr)
{
return NonnullLockRefPtr<T>(static_cast<const T&>(*ptr));
return NonnullLockRefPtr<T>(static_cast<T const&>(*ptr));
}

template<typename T, typename U, typename PtrTraits = LockRefPtrTraits<T>>
inline LockRefPtr<T> static_ptr_cast(LockRefPtr<U> const& ptr)
{
return LockRefPtr<T, PtrTraits>(static_cast<const T*>(ptr.ptr()));
return LockRefPtr<T, PtrTraits>(static_cast<T const*>(ptr.ptr()));
}

template<typename T, typename PtrTraitsT, typename U, typename PtrTraitsU>
inline void swap(LockRefPtr<T, PtrTraitsT>& a, LockRefPtr<U, PtrTraitsU>& b) requires(IsConvertible<U*, T*>)
inline void swap(LockRefPtr<T, PtrTraitsT>& a, LockRefPtr<U, PtrTraitsU>& b)
requires(IsConvertible<U*, T*>)
{
a.swap(b);
}

@ -19,26 +19,30 @@ public:
|
|||
LockWeakPtr() = default;
|
||||
|
||||
template<typename U>
|
||||
LockWeakPtr(WeakPtr<U> const& other) requires(IsBaseOf<T, U>)
|
||||
LockWeakPtr(WeakPtr<U> const& other)
|
||||
requires(IsBaseOf<T, U>)
|
||||
: m_link(other.m_link)
|
||||
{
|
||||
}
|
||||
|
||||
template<typename U>
|
||||
LockWeakPtr(WeakPtr<U>&& other) requires(IsBaseOf<T, U>)
|
||||
LockWeakPtr(WeakPtr<U>&& other)
|
||||
requires(IsBaseOf<T, U>)
|
||||
: m_link(other.take_link())
|
||||
{
|
||||
}
|
||||
|
||||
template<typename U>
|
||||
LockWeakPtr& operator=(WeakPtr<U>&& other) requires(IsBaseOf<T, U>)
|
||||
LockWeakPtr& operator=(WeakPtr<U>&& other)
|
||||
requires(IsBaseOf<T, U>)
|
||||
{
|
||||
m_link = other.take_link();
|
||||
return *this;
|
||||
}
|
||||
|
||||
template<typename U>
|
||||
LockWeakPtr& operator=(WeakPtr<U> const& other) requires(IsBaseOf<T, U>)
|
||||
LockWeakPtr& operator=(WeakPtr<U> const& other)
|
||||
requires(IsBaseOf<T, U>)
|
||||
{
|
||||
if ((void const*)this != (void const*)&other)
|
||||
m_link = other.m_link;
|
||||
|
@ -52,20 +56,23 @@ public:
|
|||
}
|
||||
|
||||
template<typename U>
|
||||
LockWeakPtr(const U& object) requires(IsBaseOf<T, U>)
|
||||
LockWeakPtr(U const& object)
|
||||
requires(IsBaseOf<T, U>)
|
||||
: m_link(object.template try_make_weak_ptr<U>().release_value_but_fixme_should_propagate_errors().take_link())
|
||||
{
|
||||
}
|
||||
|
||||
template<typename U>
|
||||
LockWeakPtr(const U* object) requires(IsBaseOf<T, U>)
|
||||
LockWeakPtr(U const* object)
|
||||
requires(IsBaseOf<T, U>)
|
||||
{
|
||||
if (object)
|
||||
m_link = object->template try_make_weak_ptr<U>().release_value_but_fixme_should_propagate_errors().take_link();
|
||||
}
|
||||
|
||||
template<typename U>
|
||||
LockWeakPtr(LockRefPtr<U> const& object) requires(IsBaseOf<T, U>)
|
||||
LockWeakPtr(LockRefPtr<U> const& object)
|
||||
requires(IsBaseOf<T, U>)
|
||||
{
|
||||
object.do_while_locked([&](U* obj) {
|
||||
if (obj)
|
||||
|
@ -74,7 +81,8 @@ public:
|
|||
}
|
||||
|
||||
template<typename U>
|
||||
LockWeakPtr(NonnullLockRefPtr<U> const& object) requires(IsBaseOf<T, U>)
|
||||
LockWeakPtr(NonnullLockRefPtr<U> const& object)
|
||||
requires(IsBaseOf<T, U>)
|
||||
{
|
||||
object.do_while_locked([&](U* obj) {
|
||||
if (obj)
|
||||
|
@ -83,14 +91,16 @@ public:
|
|||
}
|
||||
|
||||
template<typename U>
|
||||
LockWeakPtr& operator=(const U& object) requires(IsBaseOf<T, U>)
|
||||
LockWeakPtr& operator=(U const& object)
|
||||
requires(IsBaseOf<T, U>)
|
||||
{
|
||||
m_link = object.template try_make_weak_ptr<U>().release_value_but_fixme_should_propagate_errors().take_link();
|
||||
return *this;
|
||||
}
|
||||
|
||||
template<typename U>
|
||||
LockWeakPtr& operator=(const U* object) requires(IsBaseOf<T, U>)
|
||||
LockWeakPtr& operator=(U const* object)
|
||||
requires(IsBaseOf<T, U>)
|
||||
{
|
||||
if (object)
|
||||
m_link = object->template try_make_weak_ptr<U>().release_value_but_fixme_should_propagate_errors().take_link();
|
||||
|
@ -100,7 +110,8 @@ public:
|
|||
}
|
||||
|
||||
template<typename U>
|
||||
LockWeakPtr& operator=(LockRefPtr<U> const& object) requires(IsBaseOf<T, U>)
|
||||
LockWeakPtr& operator=(LockRefPtr<U> const& object)
|
||||
requires(IsBaseOf<T, U>)
|
||||
{
|
||||
object.do_while_locked([&](U* obj) {
|
||||
if (obj)
|
||||
|
@ -112,7 +123,8 @@ public:
|
|||
}
|
||||
|
||||
template<typename U>
|
||||
LockWeakPtr& operator=(NonnullLockRefPtr<U> const& object) requires(IsBaseOf<T, U>)
|
||||
LockWeakPtr& operator=(NonnullLockRefPtr<U> const& object)
|
||||
requires(IsBaseOf<T, U>)
|
||||
{
|
||||
object.do_while_locked([&](U* obj) {
|
||||
if (obj)
|
||||
|
@ -175,7 +187,7 @@ inline ErrorOr<LockWeakPtr<U>> LockWeakable<T>::try_make_weak_ptr() const
|
|||
// to add a ref (which should fail if the ref count is at 0) so
|
||||
// that we prevent the destructor and revoke_weak_ptrs from being
|
||||
// triggered until we're done.
|
||||
if (!static_cast<const T*>(this)->try_ref())
|
||||
if (!static_cast<T const*>(this)->try_ref())
|
||||
return LockWeakPtr<U> {};
|
||||
} else {
|
||||
// For non-RefCounted types this means a weak reference can be
|
||||
|
@ -187,14 +199,14 @@ inline ErrorOr<LockWeakPtr<U>> LockWeakable<T>::try_make_weak_ptr() const
|
|||
// There is a small chance that we create a new WeakLink and throw
|
||||
// it away because another thread beat us to it. But the window is
|
||||
// pretty small and the overhead isn't terrible.
|
||||
m_link.assign_if_null(TRY(adopt_nonnull_lock_ref_or_enomem(new (nothrow) WeakLink(const_cast<T&>(static_cast<const T&>(*this))))));
|
||||
m_link.assign_if_null(TRY(adopt_nonnull_lock_ref_or_enomem(new (nothrow) WeakLink(const_cast<T&>(static_cast<T const&>(*this))))));
|
||||
}
|
||||
|
||||
LockWeakPtr<U> weak_ptr(m_link);
|
||||
|
||||
if constexpr (IsBaseOf<AtomicRefCountedBase, T>) {
|
||||
// Now drop the reference we temporarily added
|
||||
if (static_cast<const T*>(this)->unref()) {
|
||||
if (static_cast<T const*>(this)->unref()) {
|
||||
// We just dropped the last reference, which should have called
|
||||
// revoke_weak_ptrs, which should have invalidated our weak_ptr
|
||||
VERIFY(!weak_ptr.strong_ref());
|
||||
|
@ -205,11 +217,11 @@ inline ErrorOr<LockWeakPtr<U>> LockWeakable<T>::try_make_weak_ptr() const
|
|||
}
|
||||
|
||||
template<typename T>
|
||||
struct Formatter<LockWeakPtr<T>> : Formatter<const T*> {
|
||||
struct Formatter<LockWeakPtr<T>> : Formatter<T const*> {
|
||||
ErrorOr<void> format(FormatBuilder& builder, LockWeakPtr<T> const& value)
|
||||
{
|
||||
auto ref = value.strong_ref();
|
||||
return Formatter<const T*>::format(builder, ref.ptr());
|
||||
return Formatter<T const*>::format(builder, ref.ptr());
|
||||
}
|
||||
};
|
||||
|
||||
|
|
|
@ -30,7 +30,7 @@ class WeakLink final : public AtomicRefCounted<WeakLink> {
|
|||
public:
|
||||
template<typename T, typename PtrTraits = LockRefPtrTraits<T>>
|
||||
LockRefPtr<T, PtrTraits> strong_ref() const
|
||||
requires(IsBaseOf<AtomicRefCountedBase, T>)
|
||||
requires(IsBaseOf<AtomicRefCountedBase, T>)
|
||||
{
|
||||
LockRefPtr<T, PtrTraits> ref;
|
||||
|
||||
|
|
|
@ -47,7 +47,8 @@ public:
|
|||
const_cast<T&>(object).ref();
|
||||
}
|
||||
template<typename U>
|
||||
ALWAYS_INLINE NonnullLockRefPtr(U const& object) requires(IsConvertible<U*, T*>)
|
||||
ALWAYS_INLINE NonnullLockRefPtr(U const& object)
|
||||
requires(IsConvertible<U*, T*>)
|
||||
: m_bits((FlatPtr) static_cast<T const*>(&object))
|
||||
{
|
||||
VERIFY(!(m_bits & 1));
|
||||
|
@ -64,7 +65,8 @@ public:
|
|||
VERIFY(!(m_bits & 1));
|
||||
}
|
||||
template<typename U>
|
||||
ALWAYS_INLINE NonnullLockRefPtr(NonnullLockRefPtr<U>&& other) requires(IsConvertible<U*, T*>)
|
||||
ALWAYS_INLINE NonnullLockRefPtr(NonnullLockRefPtr<U>&& other)
|
||||
requires(IsConvertible<U*, T*>)
|
||||
: m_bits((FlatPtr)&other.leak_ref())
|
||||
{
|
||||
VERIFY(!(m_bits & 1));
|
||||
|
@ -75,7 +77,8 @@ public:
|
|||
VERIFY(!(m_bits & 1));
|
||||
}
|
||||
template<typename U>
|
||||
ALWAYS_INLINE NonnullLockRefPtr(NonnullLockRefPtr<U> const& other) requires(IsConvertible<U*, T*>)
|
||||
ALWAYS_INLINE NonnullLockRefPtr(NonnullLockRefPtr<U> const& other)
|
||||
requires(IsConvertible<U*, T*>)
|
||||
: m_bits((FlatPtr)other.add_ref())
|
||||
{
|
||||
VERIFY(!(m_bits & 1));
|
||||
|
@ -108,7 +111,8 @@ public:
|
|||
}
|
||||
|
||||
template<typename U>
|
||||
NonnullLockRefPtr& operator=(NonnullLockRefPtr<U> const& other) requires(IsConvertible<U*, T*>)
|
||||
NonnullLockRefPtr& operator=(NonnullLockRefPtr<U> const& other)
|
||||
requires(IsConvertible<U*, T*>)
|
||||
{
|
||||
assign(other.add_ref());
|
||||
return *this;
|
||||
|
@ -122,7 +126,8 @@ public:
|
|||
}
|
||||
|
||||
template<typename U>
|
||||
NonnullLockRefPtr& operator=(NonnullLockRefPtr<U>&& other) requires(IsConvertible<U*, T*>)
|
||||
NonnullLockRefPtr& operator=(NonnullLockRefPtr<U>&& other)
|
||||
requires(IsConvertible<U*, T*>)
|
||||
{
|
||||
assign(&other.leak_ref());
|
||||
return *this;
|
||||
|
@ -202,7 +207,8 @@ public:
|
|||
}
|
||||
|
||||
template<typename U>
|
||||
void swap(NonnullLockRefPtr<U>& other) requires(IsConvertible<U*, T*>)
|
||||
void swap(NonnullLockRefPtr<U>& other)
|
||||
requires(IsConvertible<U*, T*>)
|
||||
{
|
||||
// NOTE: swap is not atomic!
|
||||
U* other_ptr = other.exchange(nullptr);
|
||||
|
@ -318,7 +324,8 @@ struct Formatter<NonnullLockRefPtr<T>> : Formatter<T const*> {
|
|||
};
|
||||
|
||||
template<typename T, typename U>
|
||||
inline void swap(NonnullLockRefPtr<T>& a, NonnullLockRefPtr<U>& b) requires(IsConvertible<U*, T*>)
|
||||
inline void swap(NonnullLockRefPtr<T>& a, NonnullLockRefPtr<U>& b)
|
||||
requires(IsConvertible<U*, T*>)
|
||||
{
|
||||
a.swap(b);
|
||||
}
|
||||
|
|
|
@ -32,11 +32,23 @@ private:
|
|||
ALWAYS_INLINE U const* operator->() const { return &m_value; }
|
||||
ALWAYS_INLINE U const& operator*() const { return m_value; }
|
||||
|
||||
ALWAYS_INLINE U* operator->() requires(!IsConst<U>) { return &m_value; }
|
||||
ALWAYS_INLINE U& operator*() requires(!IsConst<U>) { return m_value; }
|
||||
ALWAYS_INLINE U* operator->()
|
||||
requires(!IsConst<U>)
|
||||
{
|
||||
return &m_value;
|
||||
}
|
||||
ALWAYS_INLINE U& operator*()
|
||||
requires(!IsConst<U>)
|
||||
{
|
||||
return m_value;
|
||||
}
|
||||
|
||||
ALWAYS_INLINE U const& get() const { return &m_value; }
|
||||
ALWAYS_INLINE U& get() requires(!IsConst<U>) { return &m_value; }
|
||||
ALWAYS_INLINE U& get()
|
||||
requires(!IsConst<U>)
|
||||
{
|
||||
return &m_value;
|
||||
}
|
||||
|
||||
private:
|
||||
U& m_value;
|
||||
|
|
|
@@ -15,12 +15,12 @@ namespace Kernel::Memory {

template<typename T>
struct TypedMapping {
const T* ptr() const { return reinterpret_cast<const T*>(region->vaddr().offset(offset).as_ptr()); }
T const* ptr() const { return reinterpret_cast<T const*>(region->vaddr().offset(offset).as_ptr()); }
T* ptr() { return reinterpret_cast<T*>(region->vaddr().offset(offset).as_ptr()); }
VirtualAddress base_address() const { return region->vaddr().offset(offset); }
const T* operator->() const { return ptr(); }
T const* operator->() const { return ptr(); }
T* operator->() { return ptr(); }
const T& operator*() const { return *ptr(); }
T const& operator*() const { return *ptr(); }
T& operator*() { return *ptr(); }
OwnPtr<Region> region;
PhysicalAddress paddr;

@@ -62,7 +62,7 @@ public:
}

template<typename T>
void add_random_event(const T& event_data, size_t pool)
void add_random_event(T const& event_data, size_t pool)
{
pool %= pool_count;
if (pool == 0) {

@@ -159,7 +159,7 @@ public:
}

template<typename T>
void add_random_event(const T& event_data)
void add_random_event(T const& event_data)
{
auto& kernel_rng = KernelRng::the();
SpinlockLocker lock(kernel_rng.get_lock());

@@ -54,11 +54,11 @@ ErrorOr<Time> copy_time_from_user(timeval const* tv_user)
}

template<>
ErrorOr<Time> copy_time_from_user<const timeval>(Userspace<timeval const*> src) { return copy_time_from_user(src.unsafe_userspace_ptr()); }
ErrorOr<Time> copy_time_from_user<timeval const>(Userspace<timeval const*> src) { return copy_time_from_user(src.unsafe_userspace_ptr()); }
template<>
ErrorOr<Time> copy_time_from_user<timeval>(Userspace<timeval*> src) { return copy_time_from_user(src.unsafe_userspace_ptr()); }
template<>
ErrorOr<Time> copy_time_from_user<const timespec>(Userspace<timespec const*> src) { return copy_time_from_user(src.unsafe_userspace_ptr()); }
ErrorOr<Time> copy_time_from_user<timespec const>(Userspace<timespec const*> src) { return copy_time_from_user(src.unsafe_userspace_ptr()); }
template<>
ErrorOr<Time> copy_time_from_user<timespec>(Userspace<timespec*> src) { return copy_time_from_user(src.unsafe_userspace_ptr()); }

@ -52,21 +52,21 @@ void const* memmem(void const* haystack, size_t, void const* needle, size_t);
|
|||
}
|
||||
|
||||
template<typename T>
|
||||
[[nodiscard]] inline ErrorOr<void> copy_from_user(T* dest, const T* src)
|
||||
[[nodiscard]] inline ErrorOr<void> copy_from_user(T* dest, T const* src)
|
||||
{
|
||||
static_assert(IsTriviallyCopyable<T>);
|
||||
return copy_from_user(dest, src, sizeof(T));
|
||||
}
|
||||
|
||||
template<typename T>
|
||||
[[nodiscard]] inline ErrorOr<void> copy_to_user(T* dest, const T* src)
|
||||
[[nodiscard]] inline ErrorOr<void> copy_to_user(T* dest, T const* src)
|
||||
{
|
||||
static_assert(IsTriviallyCopyable<T>);
|
||||
return copy_to_user(dest, src, sizeof(T));
|
||||
}
|
||||
|
||||
template<typename T>
|
||||
[[nodiscard]] inline ErrorOr<void> copy_from_user(T* dest, Userspace<const T*> src)
|
||||
[[nodiscard]] inline ErrorOr<void> copy_from_user(T* dest, Userspace<T const*> src)
|
||||
{
|
||||
static_assert(IsTriviallyCopyable<T>);
|
||||
return copy_from_user(dest, src.unsafe_userspace_ptr(), sizeof(T));
|
||||
|
@ -100,7 +100,7 @@ DEPRECATE_COPY_FROM_USER_TYPE(timespec, copy_time_from_user)
|
|||
DEPRECATE_COPY_FROM_USER_TYPE(timeval, copy_time_from_user)
|
||||
|
||||
template<typename T>
|
||||
[[nodiscard]] inline ErrorOr<void> copy_to_user(Userspace<T*> dest, const T* src)
|
||||
[[nodiscard]] inline ErrorOr<void> copy_to_user(Userspace<T*> dest, T const* src)
|
||||
{
|
||||
static_assert(IsTriviallyCopyable<T>);
|
||||
return copy_to_user(dest.unsafe_userspace_ptr(), src, sizeof(T));
|
||||
|
@ -114,14 +114,14 @@ template<typename T>
|
|||
}
|
||||
|
||||
template<typename T>
|
||||
[[nodiscard]] inline ErrorOr<void> copy_from_user(void* dest, Userspace<const T*> src, size_t size)
|
||||
[[nodiscard]] inline ErrorOr<void> copy_from_user(void* dest, Userspace<T const*> src, size_t size)
|
||||
{
|
||||
static_assert(IsTriviallyCopyable<T>);
|
||||
return copy_from_user(dest, src.unsafe_userspace_ptr(), size);
|
||||
}
|
||||
|
||||
template<typename T>
|
||||
[[nodiscard]] inline ErrorOr<void> copy_n_from_user(T* dest, const T* src, size_t count)
|
||||
[[nodiscard]] inline ErrorOr<void> copy_n_from_user(T* dest, T const* src, size_t count)
|
||||
{
|
||||
static_assert(IsTriviallyCopyable<T>);
|
||||
Checked<size_t> size = sizeof(T);
|
||||
|
@ -132,7 +132,7 @@ template<typename T>
|
|||
}
|
||||
|
||||
template<typename T>
|
||||
[[nodiscard]] inline ErrorOr<void> copy_n_to_user(T* dest, const T* src, size_t count)
|
||||
[[nodiscard]] inline ErrorOr<void> copy_n_to_user(T* dest, T const* src, size_t count)
|
||||
{
|
||||
static_assert(IsTriviallyCopyable<T>);
|
||||
Checked<size_t> size = sizeof(T);
|
||||
|
@ -143,7 +143,7 @@ template<typename T>
|
|||
}
|
||||
|
||||
template<typename T>
|
||||
[[nodiscard]] inline ErrorOr<void> copy_n_from_user(T* dest, Userspace<const T*> src, size_t count)
|
||||
[[nodiscard]] inline ErrorOr<void> copy_n_from_user(T* dest, Userspace<T const*> src, size_t count)
|
||||
{
|
||||
static_assert(IsTriviallyCopyable<T>);
|
||||
Checked<size_t> size = sizeof(T);
|
||||
|
@ -154,7 +154,7 @@ template<typename T>
|
|||
}
|
||||
|
||||
template<typename T>
|
||||
[[nodiscard]] inline ErrorOr<void> try_copy_n_to_user(Userspace<T*> dest, const T* src, size_t count)
|
||||
[[nodiscard]] inline ErrorOr<void> try_copy_n_to_user(Userspace<T*> dest, T const* src, size_t count)
|
||||
{
|
||||
static_assert(IsTriviallyCopyable<T>);
|
||||
Checked<size_t> size = sizeof(T);
|
||||
|
|
|
@ -11,7 +11,7 @@
|
|||
|
||||
namespace Kernel {
|
||||
|
||||
UNMAP_AFTER_INIT NVMeInterruptQueue::NVMeInterruptQueue(NonnullOwnPtr<Memory::Region> rw_dma_region, Memory::PhysicalPage const& rw_dma_page, u16 qid, u8 irq, u32 q_depth, OwnPtr<Memory::Region> cq_dma_region, NonnullRefPtrVector<Memory::PhysicalPage> cq_dma_page, OwnPtr<Memory::Region> sq_dma_region, NonnullRefPtrVector<Memory::PhysicalPage> sq_dma_page, Memory::TypedMapping<volatile DoorbellRegister> db_regs)
|
||||
UNMAP_AFTER_INIT NVMeInterruptQueue::NVMeInterruptQueue(NonnullOwnPtr<Memory::Region> rw_dma_region, Memory::PhysicalPage const& rw_dma_page, u16 qid, u8 irq, u32 q_depth, OwnPtr<Memory::Region> cq_dma_region, NonnullRefPtrVector<Memory::PhysicalPage> cq_dma_page, OwnPtr<Memory::Region> sq_dma_region, NonnullRefPtrVector<Memory::PhysicalPage> sq_dma_page, Memory::TypedMapping<DoorbellRegister volatile> db_regs)
|
||||
: NVMeQueue(move(rw_dma_region), rw_dma_page, qid, q_depth, move(cq_dma_region), cq_dma_page, move(sq_dma_region), sq_dma_page, move(db_regs))
|
||||
, IRQHandler(irq)
|
||||
{
|
||||
|
|
|
@ -13,7 +13,7 @@ namespace Kernel {
|
|||
class NVMeInterruptQueue : public NVMeQueue
|
||||
, public IRQHandler {
|
||||
public:
|
||||
NVMeInterruptQueue(NonnullOwnPtr<Memory::Region> rw_dma_region, Memory::PhysicalPage const& rw_dma_page, u16 qid, u8 irq, u32 q_depth, OwnPtr<Memory::Region> cq_dma_region, NonnullRefPtrVector<Memory::PhysicalPage> cq_dma_page, OwnPtr<Memory::Region> sq_dma_region, NonnullRefPtrVector<Memory::PhysicalPage> sq_dma_page, Memory::TypedMapping<volatile DoorbellRegister> db_regs);
|
||||
NVMeInterruptQueue(NonnullOwnPtr<Memory::Region> rw_dma_region, Memory::PhysicalPage const& rw_dma_page, u16 qid, u8 irq, u32 q_depth, OwnPtr<Memory::Region> cq_dma_region, NonnullRefPtrVector<Memory::PhysicalPage> cq_dma_page, OwnPtr<Memory::Region> sq_dma_region, NonnullRefPtrVector<Memory::PhysicalPage> sq_dma_page, Memory::TypedMapping<DoorbellRegister volatile> db_regs);
|
||||
void submit_sqe(NVMeSubmission& submission) override;
|
||||
virtual ~NVMeInterruptQueue() override {};
|
||||
|
||||
|
|
|
@ -10,7 +10,7 @@
|
|||
#include <Kernel/Storage/NVMe/NVMePollQueue.h>
|
||||
|
||||
namespace Kernel {
|
||||
UNMAP_AFTER_INIT NVMePollQueue::NVMePollQueue(NonnullOwnPtr<Memory::Region> rw_dma_region, Memory::PhysicalPage const& rw_dma_page, u16 qid, u32 q_depth, OwnPtr<Memory::Region> cq_dma_region, NonnullRefPtrVector<Memory::PhysicalPage> cq_dma_page, OwnPtr<Memory::Region> sq_dma_region, NonnullRefPtrVector<Memory::PhysicalPage> sq_dma_page, Memory::TypedMapping<volatile DoorbellRegister> db_regs)
|
||||
UNMAP_AFTER_INIT NVMePollQueue::NVMePollQueue(NonnullOwnPtr<Memory::Region> rw_dma_region, Memory::PhysicalPage const& rw_dma_page, u16 qid, u32 q_depth, OwnPtr<Memory::Region> cq_dma_region, NonnullRefPtrVector<Memory::PhysicalPage> cq_dma_page, OwnPtr<Memory::Region> sq_dma_region, NonnullRefPtrVector<Memory::PhysicalPage> sq_dma_page, Memory::TypedMapping<DoorbellRegister volatile> db_regs)
|
||||
: NVMeQueue(move(rw_dma_region), rw_dma_page, qid, q_depth, move(cq_dma_region), cq_dma_page, move(sq_dma_region), sq_dma_page, move(db_regs))
|
||||
{
|
||||
}
|
||||
|
|
|
@ -12,7 +12,7 @@ namespace Kernel {
|
|||
|
||||
class NVMePollQueue : public NVMeQueue {
|
||||
public:
|
||||
NVMePollQueue(NonnullOwnPtr<Memory::Region> rw_dma_region, Memory::PhysicalPage const& rw_dma_page, u16 qid, u32 q_depth, OwnPtr<Memory::Region> cq_dma_region, NonnullRefPtrVector<Memory::PhysicalPage> cq_dma_page, OwnPtr<Memory::Region> sq_dma_region, NonnullRefPtrVector<Memory::PhysicalPage> sq_dma_page, Memory::TypedMapping<volatile DoorbellRegister> db_regs);
|
||||
NVMePollQueue(NonnullOwnPtr<Memory::Region> rw_dma_region, Memory::PhysicalPage const& rw_dma_page, u16 qid, u32 q_depth, OwnPtr<Memory::Region> cq_dma_region, NonnullRefPtrVector<Memory::PhysicalPage> cq_dma_page, OwnPtr<Memory::Region> sq_dma_region, NonnullRefPtrVector<Memory::PhysicalPage> sq_dma_page, Memory::TypedMapping<DoorbellRegister volatile> db_regs);
|
||||
void submit_sqe(NVMeSubmission& submission) override;
|
||||
virtual ~NVMePollQueue() override {};
|
||||
|
||||
|
|
|
@ -12,7 +12,7 @@
|
|||
#include <Kernel/Storage/NVMe/NVMeQueue.h>
|
||||
|
||||
namespace Kernel {
|
||||
ErrorOr<NonnullLockRefPtr<NVMeQueue>> NVMeQueue::try_create(u16 qid, Optional<u8> irq, u32 q_depth, OwnPtr<Memory::Region> cq_dma_region, NonnullRefPtrVector<Memory::PhysicalPage> cq_dma_page, OwnPtr<Memory::Region> sq_dma_region, NonnullRefPtrVector<Memory::PhysicalPage> sq_dma_page, Memory::TypedMapping<volatile DoorbellRegister> db_regs)
|
||||
ErrorOr<NonnullLockRefPtr<NVMeQueue>> NVMeQueue::try_create(u16 qid, Optional<u8> irq, u32 q_depth, OwnPtr<Memory::Region> cq_dma_region, NonnullRefPtrVector<Memory::PhysicalPage> cq_dma_page, OwnPtr<Memory::Region> sq_dma_region, NonnullRefPtrVector<Memory::PhysicalPage> sq_dma_page, Memory::TypedMapping<DoorbellRegister volatile> db_regs)
|
||||
{
|
||||
// Note: Allocate DMA region for RW operation. For now the requests don't exceed more than 4096 bytes (Storage device takes care of it)
|
||||
RefPtr<Memory::PhysicalPage> rw_dma_page;
|
||||
|
@ -25,7 +25,7 @@ ErrorOr<NonnullLockRefPtr<NVMeQueue>> NVMeQueue::try_create(u16 qid, Optional<u8
|
|||
return queue;
|
||||
}
|
||||
|
||||
UNMAP_AFTER_INIT NVMeQueue::NVMeQueue(NonnullOwnPtr<Memory::Region> rw_dma_region, Memory::PhysicalPage const& rw_dma_page, u16 qid, u32 q_depth, OwnPtr<Memory::Region> cq_dma_region, NonnullRefPtrVector<Memory::PhysicalPage> cq_dma_page, OwnPtr<Memory::Region> sq_dma_region, NonnullRefPtrVector<Memory::PhysicalPage> sq_dma_page, Memory::TypedMapping<volatile DoorbellRegister> db_regs)
|
||||
UNMAP_AFTER_INIT NVMeQueue::NVMeQueue(NonnullOwnPtr<Memory::Region> rw_dma_region, Memory::PhysicalPage const& rw_dma_page, u16 qid, u32 q_depth, OwnPtr<Memory::Region> cq_dma_region, NonnullRefPtrVector<Memory::PhysicalPage> cq_dma_page, OwnPtr<Memory::Region> sq_dma_region, NonnullRefPtrVector<Memory::PhysicalPage> sq_dma_page, Memory::TypedMapping<DoorbellRegister volatile> db_regs)
|
||||
: m_current_request(nullptr)
|
||||
, m_rw_dma_region(move(rw_dma_region))
|
||||
, m_qid(qid)
|
||||
|
|
|
@ -97,7 +97,8 @@ public:
|
|||
return "u64"sv;
|
||||
}
|
||||
|
||||
void generate(SourceGenerator& generator, StringView type, StringView name, size_t max_values_per_row) requires(!StorageTypeIsList<StorageType>)
|
||||
void generate(SourceGenerator& generator, StringView type, StringView name, size_t max_values_per_row)
|
||||
requires(!StorageTypeIsList<StorageType>)
|
||||
{
|
||||
generator.set("type"sv, type);
|
||||
generator.set("name"sv, name);
|
||||
|
@ -129,7 +130,8 @@ static constexpr Array<@type@, @size@ + 1> @name@ { {
|
|||
)~~~");
|
||||
}
|
||||
|
||||
void generate(SourceGenerator& generator, StringView type, StringView name) requires(StorageTypeIsList<StorageType>)
|
||||
void generate(SourceGenerator& generator, StringView type, StringView name)
|
||||
requires(StorageTypeIsList<StorageType>)
|
||||
{
|
||||
generator.set("type"sv, type);
|
||||
generator.set("name"sv, name);
|
||||
|
|
|
@@ -9,7 +9,7 @@
#include <AK/BitCast.h>

template<typename A, typename B>
void check_cast_both_ways(const A& a, const B& b)
void check_cast_both_ways(A const& a, B const& b)
{
EXPECT_EQ((bit_cast<A, B>(b)), a);
EXPECT_EQ((bit_cast<B, A>(a)), b);

@@ -103,7 +103,7 @@ TEST_CASE(can_subspan_as_intended)
{
static constexpr u16 buffer[8] { 1, 2, 3, 4, 5, 6, 7, 8 };

constexpr Span<const u16> span { buffer, 8 };
constexpr Span<u16 const> span { buffer, 8 };
constexpr auto slice = span.slice(3, 2);

static_assert(slice.size() == 2u);

@@ -166,7 +166,7 @@ TEST_CASE(IsAssignable)
EXPECT_TRAIT_TRUE(IsTriviallyMoveAssignable, A);

struct B {
B& operator=(const B&) { return *this; }
B& operator=(B const&) { return *this; }
B& operator=(B&&) { return *this; }
};
EXPECT_TRAIT_TRUE(IsCopyAssignable, B);

@@ -175,7 +175,7 @@ TEST_CASE(IsAssignable)
EXPECT_TRAIT_FALSE(IsTriviallyMoveAssignable, B);

struct C {
C& operator=(const C&) = delete;
C& operator=(C const&) = delete;
C& operator=(C&&) = delete;
};
EXPECT_TRAIT_FALSE(IsCopyAssignable, C);

@@ -194,7 +194,7 @@ TEST_CASE(IsConstructible)
EXPECT_TRAIT_TRUE(IsTriviallyMoveConstructible, A);

struct B {
B(const B&)
B(B const&)
{
}
B(B&&)

@@ -207,7 +207,7 @@ TEST_CASE(IsConstructible)
EXPECT_TRAIT_FALSE(IsTriviallyMoveConstructible, B);

struct C {
C(const C&) = delete;
C(C const&) = delete;
C(C&&) = delete;
};
EXPECT_TRAIT_FALSE(IsCopyConstructible, C);

@@ -24,7 +24,8 @@ static PosixOptions match_test_api_options(const PosixOptions options)
}

template<typename... Flags>
static constexpr ECMAScriptFlags combine_flags(Flags&&... flags) requires((IsSame<Flags, ECMAScriptFlags> && ...))
static constexpr ECMAScriptFlags combine_flags(Flags&&... flags)
requires((IsSame<Flags, ECMAScriptFlags> && ...))
{
return static_cast<ECMAScriptFlags>((static_cast<regex::FlagsUnderlyingType>(flags) | ...));
}

@ -107,7 +107,7 @@ public:
|
|||
size_t index() const { return m_index; }
|
||||
size_t size() const { return m_xsv.headers().size(); }
|
||||
|
||||
using ConstIterator = AK::SimpleIterator<const Row, const StringView>;
|
||||
using ConstIterator = AK::SimpleIterator<const Row, StringView const>;
|
||||
using Iterator = AK::SimpleIterator<Row, StringView>;
|
||||
|
||||
constexpr ConstIterator begin() const { return ConstIterator::begin(*this); }
|
||||
|
@ -124,20 +124,26 @@ public:
|
|||
template<bool const_>
|
||||
class RowIterator {
|
||||
public:
|
||||
explicit RowIterator(const XSV& xsv, size_t init_index = 0) requires(const_)
|
||||
explicit RowIterator(const XSV& xsv, size_t init_index = 0)
|
||||
requires(const_)
|
||||
: m_xsv(const_cast<XSV&>(xsv))
|
||||
, m_index(init_index)
|
||||
{
|
||||
}
|
||||
|
||||
explicit RowIterator(XSV& xsv, size_t init_index = 0) requires(!const_)
|
||||
explicit RowIterator(XSV& xsv, size_t init_index = 0)
|
||||
requires(!const_)
|
||||
: m_xsv(xsv)
|
||||
, m_index(init_index)
|
||||
{
|
||||
}
|
||||
|
||||
Row operator*() const { return Row { m_xsv, m_index }; }
|
||||
Row operator*() requires(!const_) { return Row { m_xsv, m_index }; }
|
||||
Row operator*()
|
||||
requires(!const_)
|
||||
{
|
||||
return Row { m_xsv, m_index };
|
||||
}
|
||||
|
||||
RowIterator& operator++()
|
||||
{
|
||||
|
|
|
@ -17,7 +17,10 @@
|
|||
class ConnectionToServer final : public HackStudio::ConnectionToServer { \
|
||||
IPC_CLIENT_CONNECTION(ConnectionToServer, "/tmp/session/%sid/portal/language/" socket_name) \
|
||||
public: \
|
||||
static char const* language_name() { return #language_name_; } \
|
||||
static char const* language_name() \
|
||||
{ \
|
||||
return #language_name_; \
|
||||
} \
|
||||
\
|
||||
private: \
|
||||
ConnectionToServer(NonnullOwnPtr<Core::Stream::LocalSocket> socket, String const& project_path) \
|
||||
|
|
|
@ -38,16 +38,43 @@
|
|||
m_vpu.name(insn); \
|
||||
}
|
||||
|
||||
#define DEFINE_GENERIC_SHIFT_ROTATE_INSN_HANDLERS(mnemonic, op) \
|
||||
void SoftCPU::mnemonic##_RM8_1(const X86::Instruction& insn) { generic_RM8_1(op<ValueWithShadow<u8>>, insn); } \
|
||||
void SoftCPU::mnemonic##_RM8_CL(const X86::Instruction& insn) { generic_RM8_CL(op<ValueWithShadow<u8>>, insn); } \
|
||||
void SoftCPU::mnemonic##_RM8_imm8(const X86::Instruction& insn) { generic_RM8_imm8<true, false>(op<ValueWithShadow<u8>>, insn); } \
|
||||
void SoftCPU::mnemonic##_RM16_1(const X86::Instruction& insn) { generic_RM16_1(op<ValueWithShadow<u16>>, insn); } \
|
||||
void SoftCPU::mnemonic##_RM16_CL(const X86::Instruction& insn) { generic_RM16_CL(op<ValueWithShadow<u16>>, insn); } \
|
||||
void SoftCPU::mnemonic##_RM16_imm8(const X86::Instruction& insn) { generic_RM16_unsigned_imm8<true>(op<ValueWithShadow<u16>>, insn); } \
|
||||
void SoftCPU::mnemonic##_RM32_1(const X86::Instruction& insn) { generic_RM32_1(op<ValueWithShadow<u32>>, insn); } \
|
||||
void SoftCPU::mnemonic##_RM32_CL(const X86::Instruction& insn) { generic_RM32_CL(op<ValueWithShadow<u32>>, insn); } \
|
||||
void SoftCPU::mnemonic##_RM32_imm8(const X86::Instruction& insn) { generic_RM32_unsigned_imm8<true>(op<ValueWithShadow<u32>>, insn); }
|
||||
#define DEFINE_GENERIC_SHIFT_ROTATE_INSN_HANDLERS(mnemonic, op) \
|
||||
void SoftCPU::mnemonic##_RM8_1(const X86::Instruction& insn) \
|
||||
{ \
|
||||
generic_RM8_1(op<ValueWithShadow<u8>>, insn); \
|
||||
} \
|
||||
void SoftCPU::mnemonic##_RM8_CL(const X86::Instruction& insn) \
|
||||
{ \
|
||||
generic_RM8_CL(op<ValueWithShadow<u8>>, insn); \
|
||||
} \
|
||||
void SoftCPU::mnemonic##_RM8_imm8(const X86::Instruction& insn) \
|
||||
{ \
|
||||
generic_RM8_imm8<true, false>(op<ValueWithShadow<u8>>, insn); \
|
||||
} \
|
||||
void SoftCPU::mnemonic##_RM16_1(const X86::Instruction& insn) \
|
||||
{ \
|
||||
generic_RM16_1(op<ValueWithShadow<u16>>, insn); \
|
||||
} \
|
||||
void SoftCPU::mnemonic##_RM16_CL(const X86::Instruction& insn) \
|
||||
{ \
|
||||
generic_RM16_CL(op<ValueWithShadow<u16>>, insn); \
|
||||
} \
|
||||
void SoftCPU::mnemonic##_RM16_imm8(const X86::Instruction& insn) \
|
||||
{ \
|
||||
generic_RM16_unsigned_imm8<true>(op<ValueWithShadow<u16>>, insn); \
|
||||
} \
|
||||
void SoftCPU::mnemonic##_RM32_1(const X86::Instruction& insn) \
|
||||
{ \
|
||||
generic_RM32_1(op<ValueWithShadow<u32>>, insn); \
|
||||
} \
|
||||
void SoftCPU::mnemonic##_RM32_CL(const X86::Instruction& insn) \
|
||||
{ \
|
||||
generic_RM32_CL(op<ValueWithShadow<u32>>, insn); \
|
||||
} \
|
||||
void SoftCPU::mnemonic##_RM32_imm8(const X86::Instruction& insn) \
|
||||
{ \
|
||||
generic_RM32_unsigned_imm8<true>(op<ValueWithShadow<u32>>, insn); \
|
||||
}
|
||||
|
||||
namespace UserspaceEmulator {
|
||||
|
||||
|
@ -332,7 +359,7 @@ ALWAYS_INLINE static T op_dec(SoftCPU& cpu, T data)
|
|||
}
|
||||
|
||||
template<typename T>
|
||||
ALWAYS_INLINE static T op_xor(SoftCPU& cpu, const T& dest, const T& src)
|
||||
ALWAYS_INLINE static T op_xor(SoftCPU& cpu, T const& dest, T const& src)
|
||||
{
|
||||
typename T::ValueType result;
|
||||
u32 new_flags = 0;
|
||||
|
@ -364,7 +391,7 @@ ALWAYS_INLINE static T op_xor(SoftCPU& cpu, const T& dest, const T& src)
|
|||
}
|
||||
|
||||
template<typename T>
|
||||
ALWAYS_INLINE static T op_or(SoftCPU& cpu, const T& dest, const T& src)
|
||||
ALWAYS_INLINE static T op_or(SoftCPU& cpu, T const& dest, T const& src)
|
||||
{
|
||||
typename T::ValueType result = 0;
|
||||
u32 new_flags = 0;
|
||||
|
@ -396,7 +423,7 @@ ALWAYS_INLINE static T op_or(SoftCPU& cpu, const T& dest, const T& src)
|
|||
}
|
||||
|
||||
template<typename T>
|
||||
ALWAYS_INLINE static T op_sub(SoftCPU& cpu, const T& dest, const T& src)
|
||||
ALWAYS_INLINE static T op_sub(SoftCPU& cpu, T const& dest, T const& src)
|
||||
{
|
||||
typename T::ValueType result = 0;
|
||||
u32 new_flags = 0;
|
||||
|
@ -428,7 +455,7 @@ ALWAYS_INLINE static T op_sub(SoftCPU& cpu, const T& dest, const T& src)
|
|||
}
|
||||
|
||||
template<typename T, bool cf>
|
||||
ALWAYS_INLINE static T op_sbb_impl(SoftCPU& cpu, const T& dest, const T& src)
|
||||
ALWAYS_INLINE static T op_sbb_impl(SoftCPU& cpu, T const& dest, T const& src)
|
||||
{
|
||||
typename T::ValueType result = 0;
|
||||
u32 new_flags = 0;
|
||||
|
@ -465,7 +492,7 @@ ALWAYS_INLINE static T op_sbb_impl(SoftCPU& cpu, const T& dest, const T& src)
|
|||
}
|
||||
|
||||
template<typename T>
|
||||
ALWAYS_INLINE static T op_sbb(SoftCPU& cpu, T& dest, const T& src)
|
||||
ALWAYS_INLINE static T op_sbb(SoftCPU& cpu, T& dest, T const& src)
|
||||
{
|
||||
cpu.warn_if_flags_tainted("sbb");
|
||||
if (cpu.cf())
|
||||
|
@ -474,7 +501,7 @@ ALWAYS_INLINE static T op_sbb(SoftCPU& cpu, T& dest, const T& src)
|
|||
}
|
||||
|
||||
template<typename T>
|
||||
ALWAYS_INLINE static T op_add(SoftCPU& cpu, T& dest, const T& src)
|
||||
ALWAYS_INLINE static T op_add(SoftCPU& cpu, T& dest, T const& src)
|
||||
{
|
||||
typename T::ValueType result = 0;
|
||||
u32 new_flags = 0;
|
||||
|
@ -506,7 +533,7 @@ ALWAYS_INLINE static T op_add(SoftCPU& cpu, T& dest, const T& src)
|
|||
}
|
||||
|
||||
template<typename T, bool cf>
|
||||
ALWAYS_INLINE static T op_adc_impl(SoftCPU& cpu, T& dest, const T& src)
|
||||
ALWAYS_INLINE static T op_adc_impl(SoftCPU& cpu, T& dest, T const& src)
|
||||
{
|
||||
typename T::ValueType result = 0;
|
||||
u32 new_flags = 0;
|
||||
|
@ -543,7 +570,7 @@ ALWAYS_INLINE static T op_adc_impl(SoftCPU& cpu, T& dest, const T& src)
|
|||
}
|
||||
|
||||
template<typename T>
|
||||
ALWAYS_INLINE static T op_adc(SoftCPU& cpu, T& dest, const T& src)
|
||||
ALWAYS_INLINE static T op_adc(SoftCPU& cpu, T& dest, T const& src)
|
||||
{
|
||||
cpu.warn_if_flags_tainted("adc");
|
||||
if (cpu.cf())
|
||||
|
@ -552,7 +579,7 @@ ALWAYS_INLINE static T op_adc(SoftCPU& cpu, T& dest, const T& src)
|
|||
}
|
||||
|
||||
template<typename T>
|
||||
ALWAYS_INLINE static T op_and(SoftCPU& cpu, const T& dest, const T& src)
|
||||
ALWAYS_INLINE static T op_and(SoftCPU& cpu, T const& dest, T const& src)
|
||||
{
|
||||
typename T::ValueType result = 0;
|
||||
u32 new_flags = 0;
|
||||
|
@ -584,7 +611,7 @@ ALWAYS_INLINE static T op_and(SoftCPU& cpu, const T& dest, const T& src)
|
|||
}
|
||||
|
||||
template<typename T>
|
||||
ALWAYS_INLINE static void op_imul(SoftCPU& cpu, const T& dest, const T& src, T& result_high, T& result_low)
|
||||
ALWAYS_INLINE static void op_imul(SoftCPU& cpu, T const& dest, T const& src, T& result_high, T& result_low)
|
||||
{
|
||||
bool did_overflow = false;
|
||||
if constexpr (sizeof(T) == 4) {
|
||||
|
@ -1167,11 +1194,23 @@ ALWAYS_INLINE void BTx_RM32_imm8(SoftCPU& cpu, const X86::Instruction& insn, Op
|
|||
insn.modrm().write32(cpu, insn, shadow_wrap_with_taint_from(result, original));
|
||||
}
|
||||
|
||||
#define DEFINE_GENERIC_BTx_INSN_HANDLERS(mnemonic, op, update_dest) \
|
||||
void SoftCPU::mnemonic##_RM32_reg32(const X86::Instruction& insn) { BTx_RM32_reg32<update_dest>(*this, insn, op<u32>); } \
|
||||
void SoftCPU::mnemonic##_RM16_reg16(const X86::Instruction& insn) { BTx_RM16_reg16<update_dest>(*this, insn, op<u16>); } \
|
||||
void SoftCPU::mnemonic##_RM32_imm8(const X86::Instruction& insn) { BTx_RM32_imm8<update_dest>(*this, insn, op<u32>); } \
|
||||
void SoftCPU::mnemonic##_RM16_imm8(const X86::Instruction& insn) { BTx_RM16_imm8<update_dest>(*this, insn, op<u16>); }
|
||||
#define DEFINE_GENERIC_BTx_INSN_HANDLERS(mnemonic, op, update_dest) \
|
||||
void SoftCPU::mnemonic##_RM32_reg32(const X86::Instruction& insn) \
|
||||
{ \
|
||||
BTx_RM32_reg32<update_dest>(*this, insn, op<u32>); \
|
||||
} \
|
||||
void SoftCPU::mnemonic##_RM16_reg16(const X86::Instruction& insn) \
|
||||
{ \
|
||||
BTx_RM16_reg16<update_dest>(*this, insn, op<u16>); \
|
||||
} \
|
||||
void SoftCPU::mnemonic##_RM32_imm8(const X86::Instruction& insn) \
|
||||
{ \
|
||||
BTx_RM32_imm8<update_dest>(*this, insn, op<u32>); \
|
||||
} \
|
||||
void SoftCPU::mnemonic##_RM16_imm8(const X86::Instruction& insn) \
|
||||
{ \
|
||||
BTx_RM16_imm8<update_dest>(*this, insn, op<u16>); \
|
||||
}
|
||||
|
||||
DEFINE_GENERIC_BTx_INSN_HANDLERS(BTS, op_bts, true);
|
||||
DEFINE_GENERIC_BTx_INSN_HANDLERS(BTR, op_btr, true);
|
||||
|
@ -2903,24 +2942,66 @@ void SoftCPU::XLAT(const X86::Instruction& insn)
|
|||
set_al(read_memory8({ segment(insn.segment_prefix().value_or(X86::SegmentRegister::DS)), offset }));
|
||||
}
|
||||
|
||||
#define DEFINE_GENERIC_INSN_HANDLERS_PARTIAL(mnemonic, op, update_dest, is_zero_idiom_if_both_operands_same, is_or) \
|
||||
void SoftCPU::mnemonic##_AL_imm8(const X86::Instruction& insn) { generic_AL_imm8<update_dest, is_or>(op<ValueWithShadow<u8>>, insn); } \
|
||||
void SoftCPU::mnemonic##_AX_imm16(const X86::Instruction& insn) { generic_AX_imm16<update_dest, is_or>(op<ValueWithShadow<u16>>, insn); } \
|
||||
void SoftCPU::mnemonic##_EAX_imm32(const X86::Instruction& insn) { generic_EAX_imm32<update_dest, is_or>(op<ValueWithShadow<u32>>, insn); } \
|
||||
void SoftCPU::mnemonic##_RM16_imm16(const X86::Instruction& insn) { generic_RM16_imm16<update_dest, is_or>(op<ValueWithShadow<u16>>, insn); } \
|
||||
void SoftCPU::mnemonic##_RM16_reg16(const X86::Instruction& insn) { generic_RM16_reg16<update_dest, is_zero_idiom_if_both_operands_same>(op<ValueWithShadow<u16>>, insn); } \
|
||||
void SoftCPU::mnemonic##_RM32_imm32(const X86::Instruction& insn) { generic_RM32_imm32<update_dest, is_or>(op<ValueWithShadow<u32>>, insn); } \
|
||||
void SoftCPU::mnemonic##_RM32_reg32(const X86::Instruction& insn) { generic_RM32_reg32<update_dest, is_zero_idiom_if_both_operands_same>(op<ValueWithShadow<u32>>, insn); } \
|
||||
void SoftCPU::mnemonic##_RM8_imm8(const X86::Instruction& insn) { generic_RM8_imm8<update_dest, is_or>(op<ValueWithShadow<u8>>, insn); } \
|
||||
void SoftCPU::mnemonic##_RM8_reg8(const X86::Instruction& insn) { generic_RM8_reg8<update_dest, is_zero_idiom_if_both_operands_same>(op<ValueWithShadow<u8>>, insn); }
|
||||
#define DEFINE_GENERIC_INSN_HANDLERS_PARTIAL(mnemonic, op, update_dest, is_zero_idiom_if_both_operands_same, is_or) \
|
||||
void SoftCPU::mnemonic##_AL_imm8(const X86::Instruction& insn) \
|
||||
{ \
|
||||
generic_AL_imm8<update_dest, is_or>(op<ValueWithShadow<u8>>, insn); \
|
||||
} \
|
||||
void SoftCPU::mnemonic##_AX_imm16(const X86::Instruction& insn) \
|
||||
{ \
|
||||
generic_AX_imm16<update_dest, is_or>(op<ValueWithShadow<u16>>, insn); \
|
||||
} \
|
||||
void SoftCPU::mnemonic##_EAX_imm32(const X86::Instruction& insn) \
|
||||
{ \
|
||||
generic_EAX_imm32<update_dest, is_or>(op<ValueWithShadow<u32>>, insn); \
|
||||
} \
|
||||
void SoftCPU::mnemonic##_RM16_imm16(const X86::Instruction& insn) \
|
||||
{ \
|
||||
generic_RM16_imm16<update_dest, is_or>(op<ValueWithShadow<u16>>, insn); \
|
||||
} \
|
||||
void SoftCPU::mnemonic##_RM16_reg16(const X86::Instruction& insn) \
|
||||
{ \
|
||||
generic_RM16_reg16<update_dest, is_zero_idiom_if_both_operands_same>(op<ValueWithShadow<u16>>, insn); \
|
||||
} \
|
||||
void SoftCPU::mnemonic##_RM32_imm32(const X86::Instruction& insn) \
|
||||
{ \
|
||||
generic_RM32_imm32<update_dest, is_or>(op<ValueWithShadow<u32>>, insn); \
|
||||
} \
|
||||
void SoftCPU::mnemonic##_RM32_reg32(const X86::Instruction& insn) \
|
||||
{ \
|
||||
generic_RM32_reg32<update_dest, is_zero_idiom_if_both_operands_same>(op<ValueWithShadow<u32>>, insn); \
|
||||
} \
|
||||
void SoftCPU::mnemonic##_RM8_imm8(const X86::Instruction& insn) \
|
||||
{ \
|
||||
generic_RM8_imm8<update_dest, is_or>(op<ValueWithShadow<u8>>, insn); \
|
||||
} \
|
||||
void SoftCPU::mnemonic##_RM8_reg8(const X86::Instruction& insn) \
|
||||
{ \
|
||||
generic_RM8_reg8<update_dest, is_zero_idiom_if_both_operands_same>(op<ValueWithShadow<u8>>, insn); \
|
||||
}
|
||||
|
||||
#define DEFINE_GENERIC_INSN_HANDLERS(mnemonic, op, update_dest, is_zero_idiom_if_both_operands_same, is_or) \
|
||||
DEFINE_GENERIC_INSN_HANDLERS_PARTIAL(mnemonic, op, update_dest, is_zero_idiom_if_both_operands_same, is_or) \
|
||||
void SoftCPU::mnemonic##_RM16_imm8(const X86::Instruction& insn) { generic_RM16_imm8<update_dest, is_or>(op<ValueWithShadow<u16>>, insn); } \
|
||||
void SoftCPU::mnemonic##_RM32_imm8(const X86::Instruction& insn) { generic_RM32_imm8<update_dest, is_or>(op<ValueWithShadow<u32>>, insn); } \
|
||||
void SoftCPU::mnemonic##_reg16_RM16(const X86::Instruction& insn) { generic_reg16_RM16<update_dest, is_zero_idiom_if_both_operands_same>(op<ValueWithShadow<u16>>, insn); } \
|
||||
void SoftCPU::mnemonic##_reg32_RM32(const X86::Instruction& insn) { generic_reg32_RM32<update_dest, is_zero_idiom_if_both_operands_same>(op<ValueWithShadow<u32>>, insn); } \
|
||||
void SoftCPU::mnemonic##_reg8_RM8(const X86::Instruction& insn) { generic_reg8_RM8<update_dest, is_zero_idiom_if_both_operands_same>(op<ValueWithShadow<u8>>, insn); }
|
||||
#define DEFINE_GENERIC_INSN_HANDLERS(mnemonic, op, update_dest, is_zero_idiom_if_both_operands_same, is_or) \
|
||||
DEFINE_GENERIC_INSN_HANDLERS_PARTIAL(mnemonic, op, update_dest, is_zero_idiom_if_both_operands_same, is_or) \
|
||||
void SoftCPU::mnemonic##_RM16_imm8(const X86::Instruction& insn) \
|
||||
{ \
|
||||
generic_RM16_imm8<update_dest, is_or>(op<ValueWithShadow<u16>>, insn); \
|
||||
} \
|
||||
void SoftCPU::mnemonic##_RM32_imm8(const X86::Instruction& insn) \
|
||||
{ \
|
||||
generic_RM32_imm8<update_dest, is_or>(op<ValueWithShadow<u32>>, insn); \
|
||||
} \
|
||||
void SoftCPU::mnemonic##_reg16_RM16(const X86::Instruction& insn) \
|
||||
{ \
|
||||
generic_reg16_RM16<update_dest, is_zero_idiom_if_both_operands_same>(op<ValueWithShadow<u16>>, insn); \
|
||||
} \
|
||||
void SoftCPU::mnemonic##_reg32_RM32(const X86::Instruction& insn) \
|
||||
{ \
|
||||
generic_reg32_RM32<update_dest, is_zero_idiom_if_both_operands_same>(op<ValueWithShadow<u32>>, insn); \
|
||||
} \
|
||||
void SoftCPU::mnemonic##_reg8_RM8(const X86::Instruction& insn) \
|
||||
{ \
|
||||
generic_reg8_RM8<update_dest, is_zero_idiom_if_both_operands_same>(op<ValueWithShadow<u8>>, insn); \
|
||||
}
|
||||
|
||||
DEFINE_GENERIC_INSN_HANDLERS(XOR, op_xor, true, true, false)
|
||||
DEFINE_GENERIC_INSN_HANDLERS(OR, op_or, true, false, true)
|
||||
|
|
|
@ -469,19 +469,19 @@ public:
|
|||
void do_once_or_repeat(const X86::Instruction& insn, Callback);
|
||||
|
||||
template<typename A>
|
||||
void taint_flags_from(const A& a)
|
||||
void taint_flags_from(A const& a)
|
||||
{
|
||||
m_flags_tainted = a.is_uninitialized();
|
||||
}
|
||||
|
||||
template<typename A, typename B>
|
||||
void taint_flags_from(const A& a, const B& b)
|
||||
void taint_flags_from(A const& a, B const& b)
|
||||
{
|
||||
m_flags_tainted = a.is_uninitialized() || b.is_uninitialized();
|
||||
}
|
||||
|
||||
template<typename A, typename B, typename C>
|
||||
void taint_flags_from(const A& a, const B& b, const C& c)
|
||||
void taint_flags_from(A const& a, B const& b, C const& c)
|
||||
{
|
||||
m_flags_tainted = a.is_uninitialized() || b.is_uninitialized() || c.is_uninitialized();
|
||||
}
|
||||
|
|
|
@ -33,7 +33,8 @@ public:
|
|||
void dump_backtrace();
|
||||
|
||||
template<typename T>
|
||||
ValueWithShadow<T> read(X86::LogicalAddress address) requires(IsTriviallyConstructible<T>)
|
||||
ValueWithShadow<T> read(X86::LogicalAddress address)
|
||||
requires(IsTriviallyConstructible<T>)
|
||||
{
|
||||
auto* region = find_region(address);
|
||||
if (!region) {
|
||||
|
|
|
@ -51,13 +51,15 @@ public:
|
|||
T value() const { return m_value; }
|
||||
ShadowType const& shadow() const { return m_shadow; }
|
||||
|
||||
T shadow_as_value() const requires(IsTriviallyConstructible<T>)
|
||||
T shadow_as_value() const
|
||||
requires(IsTriviallyConstructible<T>)
|
||||
{
|
||||
return *bit_cast<T const*>(m_shadow.data());
|
||||
}
|
||||
|
||||
template<auto member>
|
||||
auto reference_to() requires(IsClass<T> || IsUnion<T>)
|
||||
auto reference_to()
|
||||
requires(IsClass<T> || IsUnion<T>)
|
||||
{
|
||||
using ResultType = ValueAndShadowReference<RemoveReference<decltype(declval<T>().*member)>>;
|
||||
return ResultType {
|
||||
|
@ -67,7 +69,8 @@ public:
|
|||
}
|
||||
|
||||
template<auto member>
|
||||
auto slice() const requires(IsClass<T> || IsUnion<T>)
|
||||
auto slice() const
|
||||
requires(IsClass<T> || IsUnion<T>)
|
||||
{
|
||||
using ResultType = ValueWithShadow<RemoveReference<decltype(declval<T>().*member)>>;
|
||||
return ResultType {
|
||||
|
@ -118,7 +121,8 @@ public:
|
|||
|
||||
ValueAndShadowReference<T>& operator=(ValueWithShadow<T> const&);
|
||||
|
||||
T shadow_as_value() const requires(IsTriviallyConstructible<T>)
|
||||
T shadow_as_value() const
|
||||
requires(IsTriviallyConstructible<T>)
|
||||
{
|
||||
return *bit_cast<T const*>(m_shadow.data());
|
||||
}
|
||||
|
|
|
@@ -7,7 +7,7 @@
#ifndef _ASSERT_H
# define _ASSERT_H

# define __stringify_helper(x) # x
# define __stringify_helper(x) #x
# define __stringify(x) __stringify_helper(x)

# ifndef __cplusplus

@@ -276,7 +276,7 @@ bool FILE::gets(T* data, size_t size)
if (m_buffer.may_use()) {
// Let's see if the buffer has something queued for us.
size_t queued_size;
const T* queued_data = bit_cast<const T*>(m_buffer.begin_dequeue(queued_size));
T const* queued_data = bit_cast<T const*>(m_buffer.begin_dequeue(queued_size));
queued_size /= sizeof(T);
if (queued_size == 0) {
// Nothing buffered; we're going to have to read some.

@@ -8,7 +8,9 @@ extern "C" {

#define DO_STUB(name) \
void name(); \
void name() { }
void name() \
{ \
}

DO_STUB(__register_frame_info);
DO_STUB(__deregister_frame_info);

@ -68,11 +68,11 @@ Vector<String> const& ShellComprehensionEngine::DocumentData::sourced_paths() co
|
|||
return all_sourced_paths.value();
|
||||
|
||||
struct : public ::Shell::AST::NodeVisitor {
|
||||
void visit(const ::Shell::AST::CastToCommand* node) override
|
||||
void visit(::Shell::AST::CastToCommand const* node) override
|
||||
{
|
||||
auto& inner = node->inner();
|
||||
if (inner->is_list()) {
|
||||
if (auto* list = dynamic_cast<const ::Shell::AST::ListConcatenate*>(inner.ptr())) {
|
||||
if (auto* list = dynamic_cast<::Shell::AST::ListConcatenate const*>(inner.ptr())) {
|
||||
auto& entries = list->list();
|
||||
if (entries.size() == 2 && entries.first()->is_bareword() && static_ptr_cast<::Shell::AST::BarewordLiteral>(entries.first())->text() == "source") {
|
||||
auto& filename = entries[1];
|
||||
|
@ -200,7 +200,7 @@ void ShellComprehensionEngine::update_declared_symbols(DocumentData const& docum
|
|||
{
|
||||
}
|
||||
|
||||
void visit(const ::Shell::AST::VariableDeclarations* node) override
|
||||
void visit(::Shell::AST::VariableDeclarations const* node) override
|
||||
{
|
||||
for (auto& entry : node->variables()) {
|
||||
auto literal = entry.name->leftmost_trivial_literal();
|
||||
|
@ -219,7 +219,7 @@ void ShellComprehensionEngine::update_declared_symbols(DocumentData const& docum
|
|||
::Shell::AST::NodeVisitor::visit(node);
|
||||
}
|
||||
|
||||
void visit(const ::Shell::AST::FunctionDeclaration* node) override
|
||||
void visit(::Shell::AST::FunctionDeclaration const* node) override
|
||||
{
|
||||
dbgln("Found function {}", node->name().name);
|
||||
declarations.append({ node->name().name, { filename, node->position().start_line.line_number, node->position().start_line.line_column }, CodeComprehension::DeclarationType::Function, {} });
|
||||
|
|
|
@ -56,12 +56,12 @@ public:
|
|||
}
|
||||
|
||||
template<typename T>
|
||||
const T* data() const
|
||||
T const* data() const
|
||||
{
|
||||
static_assert(IsVoid<T> || IsTrivial<T>);
|
||||
if (!m_impl)
|
||||
return nullptr;
|
||||
return (const T*)m_impl->data();
|
||||
return (T const*)m_impl->data();
|
||||
}
|
||||
|
||||
private:
|
||||
|
|
|
@@ -461,7 +461,8 @@ void ArgsParser::add_option(StringView& value, char const* help_string, char con
}

template<typename Integral>
void ArgsParser::add_option(Integral& value, char const* help_string, char const* long_name, char short_name, char const* value_name, OptionHideMode hide_mode) requires(IsIntegral<Integral>)
void ArgsParser::add_option(Integral& value, char const* help_string, char const* long_name, char short_name, char const* value_name, OptionHideMode hide_mode)
requires(IsIntegral<Integral>)
{
Option option {
OptionArgumentMode::Required,

@@ -90,7 +90,8 @@ public:
void add_option(String& value, char const* help_string, char const* long_name, char short_name, char const* value_name, OptionHideMode hide_mode = OptionHideMode::None);
void add_option(StringView& value, char const* help_string, char const* long_name, char short_name, char const* value_name, OptionHideMode hide_mode = OptionHideMode::None);
template<typename Integral>
void add_option(Integral& value, char const* help_string, char const* long_name, char short_name, char const* value_name, OptionHideMode hide_mode = OptionHideMode::None) requires(IsIntegral<Integral>);
void add_option(Integral& value, char const* help_string, char const* long_name, char short_name, char const* value_name, OptionHideMode hide_mode = OptionHideMode::None)
requires(IsIntegral<Integral>);
void add_option(double& value, char const* help_string, char const* long_name, char short_name, char const* value_name, OptionHideMode hide_mode = OptionHideMode::None);
void add_option(Optional<double>& value, char const* help_string, char const* long_name, char short_name, char const* value_name, OptionHideMode hide_mode = OptionHideMode::None);
void add_option(Optional<size_t>& value, char const* help_string, char const* long_name, char short_name, char const* value_name, OptionHideMode hide_mode = OptionHideMode::None);

@ -67,7 +67,10 @@ enum class TimerShouldFireWhenNotVisible {
|
|||
|
||||
#define C_OBJECT(klass) \
|
||||
public: \
|
||||
virtual StringView class_name() const override { return #klass##sv; } \
|
||||
virtual StringView class_name() const override \
|
||||
{ \
|
||||
return #klass##sv; \
|
||||
} \
|
||||
template<typename Klass = klass, class... Args> \
|
||||
static NonnullRefPtr<klass> construct(Args&&... args) \
|
||||
{ \
|
||||
|
@ -79,9 +82,12 @@ public:
|
|||
return adopt_nonnull_ref_or_enomem(new (nothrow) Klass(::forward<Args>(args)...)); \
|
||||
}
|
||||
|
||||
#define C_OBJECT_ABSTRACT(klass) \
|
||||
public: \
|
||||
virtual StringView class_name() const override { return #klass##sv; }
|
||||
#define C_OBJECT_ABSTRACT(klass) \
|
||||
public: \
|
||||
virtual StringView class_name() const override \
|
||||
{ \
|
||||
return #klass##sv; \
|
||||
}
|
||||
|
||||
class Object
|
||||
: public RefCounted<Object>
|
||||
|
@@ -119,13 +125,16 @@ public:
}

template<typename T, typename Callback>
void for_each_child_of_type(Callback callback) requires IsBaseOf<Object, T>;
void for_each_child_of_type(Callback callback)
requires IsBaseOf<Object, T>;

template<typename T>
T* find_child_of_type_named(String const&) requires IsBaseOf<Object, T>;
T* find_child_of_type_named(String const&)
requires IsBaseOf<Object, T>;

template<typename T>
T* find_descendant_of_type_named(String const&) requires IsBaseOf<Object, T>;
T* find_descendant_of_type_named(String const&)
requires IsBaseOf<Object, T>;

bool is_ancestor_of(Object const&) const;

@@ -226,7 +235,8 @@ struct AK::Formatter<Core::Object> : AK::Formatter<FormatString> {

namespace Core {
template<typename T, typename Callback>
inline void Object::for_each_child_of_type(Callback callback) requires IsBaseOf<Object, T>
inline void Object::for_each_child_of_type(Callback callback)
requires IsBaseOf<Object, T>
{
for_each_child([&](auto& child) {
if (is<T>(child))

@@ -236,7 +246,8 @@ inline void Object::for_each_child_of_type(Callback callback) requires IsBaseOf<
}

template<typename T>
T* Object::find_child_of_type_named(String const& name) requires IsBaseOf<Object, T>
T* Object::find_child_of_type_named(String const& name)
requires IsBaseOf<Object, T>
{
T* found_child = nullptr;
for_each_child_of_type<T>([&](auto& child) {

@@ -251,7 +262,8 @@ T* Object::find_child_of_type_named(String const& name) requires IsBaseOf<Object
}

template<typename T>
T* Object::find_descendant_of_type_named(String const& name) requires IsBaseOf<Object, T>
T* Object::find_descendant_of_type_named(String const& name)
requires IsBaseOf<Object, T>
{
if (is<T>(*this) && this->name() == name) {
return static_cast<T*>(this);

@@ -279,7 +279,8 @@ class PosixSocketHelper {

public:
template<typename T>
PosixSocketHelper(Badge<T>) requires(IsBaseOf<Socket, T>)
PosixSocketHelper(Badge<T>)
requires(IsBaseOf<Socket, T>)
{
}

@@ -29,7 +29,7 @@ public:
Optional<size_t> index_of_node_at(Position) const;
Optional<Token> token_at(Position) const;
Optional<size_t> index_of_token_at(Position) const;
RefPtr<const TranslationUnit> root_node() const { return m_root_node; }
RefPtr<TranslationUnit const> root_node() const { return m_root_node; }
String text_of_node(ASTNode const&) const;
StringView text_of_token(Cpp::Token const& token) const;
void print_tokens() const;

@@ -318,7 +318,7 @@ void pretty_print(Decoder& decoder, OutputStream& stream, int indent)
break;
}
case Kind::BitString: {
auto value = decoder.read<const BitmapView>();
auto value = decoder.read<BitmapView const>();
if (value.is_error()) {
dbgln("BitString PrettyPrint error: {}", value.error());
return;

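The LibCpp and ASN.1 hunks above are a different, purely mechanical part of the same commit: const qualifiers move to the right of the type they qualify ("east const"), presumably driven by the qualifier-alignment setting in the project's .clang-format. Both spellings denote the same type; the following tiny sketch is illustrative only, with made-up names not from the repository.

#include <cstdio>

// "const int" and "int const" are the same type; the reformat only
// standardizes which side of the type the qualifier is written on.
static void print_sum(int const& a, const int& b)
{
    std::printf("%d\n", a + b);
}

int main()
{
    int const x = 1;   // east-const spelling
    const int y = 2;   // equivalent west-const spelling
    print_sum(x, y);
    return 0;
}
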
@@ -18,7 +18,7 @@ class SignedBigInteger {
public:
template<typename T>
requires(IsSigned<T> && sizeof(T) <= sizeof(i32))
SignedBigInteger(T value)
SignedBigInteger(T value)
: m_sign(value < 0)
, m_unsigned_data(abs(static_cast<i32>(value)))
{

@@ -27,7 +27,7 @@ public:
// This constructor accepts any unsigned with size up to Word.
template<typename T>
requires(IsIntegral<T> && sizeof(T) <= sizeof(Word))
UnsignedBigInteger(T value)
UnsignedBigInteger(T value)
{
m_words.append(static_cast<Word>(value));
}

@@ -24,7 +24,7 @@ public:

virtual size_t IV_length() const = 0;

const T& cipher() const { return m_cipher; }
T const& cipher() const { return m_cipher; }

ErrorOr<ByteBuffer> create_aligned_buffer(size_t input_size) const
{

@@ -70,7 +70,8 @@ public:
return value();
}

operator double() const requires(IsSame<ParameterT, ParameterFixedPoint>)
operator double() const
requires(IsSame<ParameterT, ParameterFixedPoint>)
{
return static_cast<double>(value());
}

@@ -166,16 +166,16 @@ T Parser::read_host(T const* field) const
}

template<typename T>
requires(IsIntegral<T> && sizeof(T) > 1) T Parser::read_le(T const* field)
const
requires(IsIntegral<T> && sizeof(T) > 1)
T Parser::read_le(T const* field) const
{
static_assert(sizeof(T) > 1);
return AK::convert_between_host_and_little_endian(read_host(field));
}

template<typename T>
requires(IsIntegral<T> && sizeof(T) > 1) T Parser::read_be(T const* field)
const
requires(IsIntegral<T> && sizeof(T) > 1)
T Parser::read_be(T const* field) const
{
static_assert(sizeof(T) > 1);
return AK::convert_between_host_and_big_endian(read_host(field));

@@ -438,12 +438,12 @@ private:
T read_host(T const*) const;

template<typename T>
requires(IsIntegral<T> && sizeof(T) > 1) T read_le(T const*)
const;
requires(IsIntegral<T> && sizeof(T) > 1)
T read_le(T const*) const;

template<typename T>
requires(IsIntegral<T> && sizeof(T) > 1) T read_be(T const*)
const;
requires(IsIntegral<T> && sizeof(T) > 1)
T read_be(T const*) const;

Definitions::EDID const& raw_edid() const;
ErrorOr<IterationDecision> for_each_display_descriptor(Function<IterationDecision(u8, Definitions::DisplayDescriptor const&)>) const;

@@ -305,8 +305,7 @@ void GLContext::gl_blend_func(GLenum src_factor, GLenum dst_factor)
m_blend_source_factor = src_factor;
m_blend_destination_factor = dst_factor;

auto map_gl_blend_factor_to_device = [](GLenum factor) constexpr
{
auto map_gl_blend_factor_to_device = [](GLenum factor) constexpr {
switch (factor) {
case GL_ZERO:
return GPU::BlendFactor::Zero;

@@ -16,21 +16,22 @@ namespace GUI {
template<typename T, typename Container = Vector<T>, typename ColumnNameListType = void>
class ItemListModel : public Model {
public:
static constexpr auto IsTwoDimensional = requires(Container data)
{
requires !IsVoid<ColumnNameListType>;
data.at(0).at(0);
data.at(0).size();
};
static constexpr auto IsTwoDimensional = requires(Container data) {
requires !IsVoid<ColumnNameListType>;
data.at(0).at(0);
data.at(0).size();
};

// Substitute 'void' for a dummy u8.
using ColumnNamesT = Conditional<IsVoid<ColumnNameListType>, u8, ColumnNameListType>;

static NonnullRefPtr<ItemListModel> create(Container const& data, ColumnNamesT const& column_names, Optional<size_t> const& row_count = {}) requires(IsTwoDimensional)
static NonnullRefPtr<ItemListModel> create(Container const& data, ColumnNamesT const& column_names, Optional<size_t> const& row_count = {})
requires(IsTwoDimensional)
{
return adopt_ref(*new ItemListModel<T, Container, ColumnNameListType>(data, column_names, row_count));
}
static NonnullRefPtr<ItemListModel> create(Container const& data, Optional<size_t> const& row_count = {}) requires(!IsTwoDimensional)
static NonnullRefPtr<ItemListModel> create(Container const& data, Optional<size_t> const& row_count = {})
requires(!IsTwoDimensional)
{
return adopt_ref(*new ItemListModel<T, Container>(data, row_count));
}

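The ItemListModel hunk also shows how requires-expressions are now laid out: the opening brace stays on the same line as requires(...). Below is a small, self-contained sketch of that pattern built around the same idea of detecting a two-dimensional container; the name is_two_dimensional is illustrative and not the library's.

#include <vector>

// Illustrative requires-expression, formatted with the opening brace on the
// same line, detecting containers whose elements are themselves indexable.
template<typename Container>
constexpr bool is_two_dimensional = requires(Container data) {
    data.at(0).at(0);
    data.at(0).size();
};

static_assert(is_two_dimensional<std::vector<std::vector<int>>>);
static_assert(!is_two_dimensional<std::vector<int>>);

int main()
{
    return 0;
}
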
@@ -119,13 +120,15 @@ public:
}

protected:
explicit ItemListModel(Container const& data, Optional<size_t> row_count = {}) requires(!IsTwoDimensional)
explicit ItemListModel(Container const& data, Optional<size_t> row_count = {})
requires(!IsTwoDimensional)
: m_data(data)
, m_provided_row_count(move(row_count))
{
}

explicit ItemListModel(Container const& data, ColumnNamesT const& column_names, Optional<size_t> row_count = {}) requires(IsTwoDimensional)
explicit ItemListModel(Container const& data, ColumnNamesT const& column_names, Optional<size_t> row_count = {})
requires(IsTwoDimensional)
: m_data(data)
, m_column_names(column_names)
, m_provided_row_count(move(row_count))
Some files were not shown because too many files have changed in this diff.