LibWeb: Use Optional<ssize_t> for the HTMLTokenizer insertion points

Instead of making a custom struct, we can just use Optional here,
to make the code feel a bit more idiomatic.
This commit is contained in:
Andreas Kling 2025-10-23 21:42:33 +02:00 committed by Andreas Kling
commit b10f2993b3
Notes: github-actions[bot] 2025-10-24 06:54:29 +00:00
2 changed files with 12 additions and 23 deletions

View file

@@ -243,8 +243,8 @@ Optional<u32> HTMLTokenizer::peek_code_point(ssize_t offset, StopAtInsertionPoint
if (it >= static_cast<ssize_t>(m_decoded_input.size()))
return {};
if (stop_at_insertion_point == StopAtInsertionPoint::Yes
&& m_insertion_point.defined
&& it >= m_insertion_point.position) {
&& m_insertion_point.has_value()
&& it >= *m_insertion_point) {
return {};
}
return m_decoded_input[it];
@@ -2900,7 +2900,7 @@ void HTMLTokenizer::insert_input_at_insertion_point(StringView input)
Vector<u32> new_decoded_input;
new_decoded_input.ensure_capacity(m_decoded_input.size() + input.length());
auto before = m_decoded_input.span().slice(0, m_insertion_point.position);
auto before = m_decoded_input.span().slice(0, *m_insertion_point);
new_decoded_input.append(before.data(), before.size());
auto utf8_to_insert = MUST(String::from_utf8(input));
@@ -2910,11 +2910,11 @@ void HTMLTokenizer::insert_input_at_insertion_point(StringView input)
++code_points_inserted;
}
auto after = m_decoded_input.span().slice(m_insertion_point.position);
auto after = m_decoded_input.span().slice(*m_insertion_point);
new_decoded_input.append(after.data(), after.size());
m_decoded_input = move(new_decoded_input);
m_insertion_point.position += code_points_inserted;
m_insertion_point.value() += code_points_inserted;
}
void HTMLTokenizer::insert_eof()

View file

@@ -135,19 +135,12 @@ public:
void insert_eof();
bool is_eof_inserted();
bool is_insertion_point_defined() const { return m_insertion_point.defined; }
bool is_insertion_point_reached()
{
return m_insertion_point.defined && m_current_offset >= m_insertion_point.position;
}
void undefine_insertion_point() { m_insertion_point.defined = false; }
bool is_insertion_point_defined() const { return m_insertion_point.has_value(); }
bool is_insertion_point_reached() { return m_insertion_point.has_value() && m_current_offset >= *m_insertion_point; }
void undefine_insertion_point() { m_insertion_point = {}; }
void store_insertion_point() { m_old_insertion_point = m_insertion_point; }
void restore_insertion_point() { m_insertion_point = m_old_insertion_point; }
void update_insertion_point()
{
m_insertion_point.defined = true;
m_insertion_point.position = m_current_offset;
}
void restore_insertion_point() { m_insertion_point = move(m_old_insertion_point); }
void update_insertion_point() { m_insertion_point = m_current_offset; }
// This permanently cuts off the tokenizer input stream.
void abort() { m_aborted = true; }
@@ -199,12 +192,8 @@ private:
String m_source;
Vector<u32> m_decoded_input;
struct InsertionPoint {
ssize_t position { 0 };
bool defined { false };
};
InsertionPoint m_insertion_point {};
InsertionPoint m_old_insertion_point {};
Optional<ssize_t> m_insertion_point;
Optional<ssize_t> m_old_insertion_point;
ssize_t m_current_offset { 0 };
ssize_t m_prev_offset { 0 };