Mirror of https://github.com/LadybirdBrowser/ladybird.git (synced 2025-07-31 05:09:12 +00:00)
LibWeb/CSS: Bring TokenStream in line with spec
When the TokenStream code was originally written, there was no such concept in the CSS Syntax spec. Since then, it has been officially added (https://drafts.csswg.org/css-syntax/#css-token-stream), and the parsing algorithms are described in terms of it. This patch brings our implementation in line with the spec. A few deprecated TokenStream methods are left around until their users are also updated to match the newer spec.

There are a few differences:
- They name things differently. The main confusing one is that we had `next_token()`, which consumed a token and returned it, while the spec's `next_token()` peeks at the next token. The spec names are honestly better than what I'd come up with. (`discard_a_token()` is a nice addition too!)
- We used to store the index of the token that was just consumed; the spec instead stores the index of the token that will be consumed next. This is a perfect breeding ground for off-by-one errors, so I've finally added a test suite for TokenStream itself.
- We use a transaction system for rewinding, while the spec uses a stack of "marks", which can be manually rewound to. These should be able to coexist as long as we stick with marks in the parser spec algorithms, and stick with transactions elsewhere.
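The renamed calls read straight off the spec's parsing algorithms. As a rough illustration (a hypothetical helper, not code from this patch, using only TokenStream calls that appear in the diff below):

#include <AK/FlyString.h>
#include <AK/Optional.h>
#include <AK/Vector.h>
#include <LibWeb/CSS/Parser/TokenStream.h>

using namespace Web::CSS::Parser;

// Hypothetical helper: accept a token list that contains exactly one identifier.
static Optional<FlyString> parse_lone_identifier(Vector<Token> const& tokens)
{
    TokenStream stream { tokens };
    stream.discard_whitespace();   // spec name for the old skip_whitespace()

    // next_token() now only peeks; it no longer advances the stream.
    if (!stream.next_token().is(Token::Type::Ident))
        return {};

    // consume_a_token() is what actually advances (the old next_token() behaviour).
    auto ident = stream.consume_a_token().ident();

    stream.discard_whitespace();
    if (stream.has_next_token())   // trailing tokens -> reject
        return {};
    return ident;
}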
Parent: 5df6c6eecf
Commit: b645e26e9b

Notes (github-actions[bot], 2024-10-09 16:30:23 +00:00):
    Author: https://github.com/AtkinsSJ
    Commit: b645e26e9b
    Pull-request: https://github.com/LadybirdBrowser/ladybird/pull/1694

8 changed files with 763 additions and 603 deletions
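The marks added here and the parser's existing transactions are expected to coexist, as the commit message notes. A minimal sketch of how the two rewind mechanisms differ, assuming (as the parser code in this diff does) that an uncommitted transaction restores the stream position when it goes out of scope:

#include <AK/FlyString.h>
#include <AK/Vector.h>
#include <LibWeb/CSS/Parser/TokenStream.h>

using namespace Web::CSS::Parser;

// Sketch only, not part of the patch.
static void marks_versus_transactions()
{
    Vector<Token> tokens {
        Token::create_ident("a"_fly_string),
        Token::create_ident("b"_fly_string),
    };
    TokenStream stream { tokens };

    // Spec-style mark: push a position, rewind to it explicitly.
    stream.mark();
    stream.discard_a_token();
    stream.restore_a_mark(); // back at "a"
    // (stream.discard_a_mark() would instead keep the current position and drop the mark.)

    // Existing transaction: rewinds unless commit() is called.
    {
        auto transaction = stream.begin_transaction();
        stream.discard_a_token();
        // No transaction.commit(), so the position is restored when the
        // transaction goes out of scope here (assumption noted above).
    }
}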
@@ -97,15 +97,15 @@ namespace Web::CSS::Parser {
 static Optional<RoundingStrategy> parse_rounding_strategy(Vector<ComponentValue> const& tokens)
 {
     auto stream = TokenStream { tokens };
-    stream.skip_whitespace();
+    stream.discard_whitespace();
     if (!stream.has_next_token())
         return {};

-    auto& ident = stream.next_token();
+    auto& ident = stream.consume_a_token();
     if (!ident.is(Token::Type::Ident))
         return {};

-    stream.skip_whitespace();
+    stream.discard_whitespace();
     if (stream.has_next_token())
         return {};
@@ -1,6 +1,7 @@
 set(TEST_SOURCES
     TestCSSIDSpeed.cpp
     TestCSSPixels.cpp
+    TestCSSTokenStream.cpp
     TestFetchInfrastructure.cpp
     TestFetchURL.cpp
     TestHTMLTokenizer.cpp
Tests/LibWeb/TestCSSTokenStream.cpp (new file, 98 lines)
@@ -0,0 +1,98 @@
+/*
+ * Copyright (c) 2024, Sam Atkins <sam@ladybird.org>
+ *
+ * SPDX-License-Identifier: BSD-2-Clause
+ */
+
+#include <AK/FlyString.h>
+#include <AK/Vector.h>
+#include <LibTest/TestCase.h>
+#include <LibWeb/CSS/Parser/TokenStream.h>
+
+namespace Web::CSS::Parser {
+
+TEST_CASE(basic)
+{
+    Vector<Token> tokens {
+        Token::create_ident("hello"_fly_string),
+    };
+
+    TokenStream stream { tokens };
+    EXPECT(!stream.is_empty());
+    EXPECT(stream.has_next_token());
+    EXPECT_EQ(stream.remaining_token_count(), 1u);
+
+    // next_token() doesn't consume it
+    auto const& next = stream.next_token();
+    EXPECT(!stream.is_empty());
+    EXPECT(stream.has_next_token());
+    EXPECT_EQ(stream.remaining_token_count(), 1u);
+    // Check what the token is
+    EXPECT(next.is(Token::Type::Ident));
+    EXPECT_EQ(next.ident(), "hello"_fly_string);
+
+    // consume_a_token() does consume it
+    auto const& consumed = stream.consume_a_token();
+    EXPECT(stream.is_empty());
+    EXPECT(!stream.has_next_token());
+    EXPECT_EQ(stream.remaining_token_count(), 0u);
+    // Check what the token is
+    EXPECT(consumed.is(Token::Type::Ident));
+    EXPECT_EQ(consumed.ident(), "hello"_fly_string);
+
+    // Now, any further tokens should be EOF
+    EXPECT(stream.next_token().is(Token::Type::EndOfFile));
+    EXPECT(stream.consume_a_token().is(Token::Type::EndOfFile));
+}
+
+TEST_CASE(marks)
+{
+    Vector<Token> tokens {
+        Token::create_ident("a"_fly_string),
+        Token::create_ident("b"_fly_string),
+        Token::create_ident("c"_fly_string),
+        Token::create_ident("d"_fly_string),
+        Token::create_ident("e"_fly_string),
+        Token::create_ident("f"_fly_string),
+        Token::create_ident("g"_fly_string),
+    };
+    TokenStream stream { tokens };
+
+    stream.mark(); // 0
+
+    EXPECT_EQ(stream.remaining_token_count(), 7u);
+
+    stream.discard_a_token();
+    stream.discard_a_token();
+    stream.discard_a_token();
+
+    EXPECT_EQ(stream.remaining_token_count(), 4u);
+
+    stream.mark(); // 3
+
+    stream.discard_a_token();
+
+    EXPECT_EQ(stream.remaining_token_count(), 3u);
+
+    stream.restore_a_mark(); // Back to 3
+
+    EXPECT_EQ(stream.remaining_token_count(), 4u);
+
+    stream.discard_a_token();
+    stream.discard_a_token();
+    stream.discard_a_token();
+
+    EXPECT_EQ(stream.remaining_token_count(), 1u);
+
+    stream.mark(); // 6
+
+    stream.discard_a_mark();
+
+    EXPECT_EQ(stream.remaining_token_count(), 1u);
+
+    stream.restore_a_mark(); // Back to 0
+
+    EXPECT_EQ(stream.remaining_token_count(), 7u);
+}
+
+}
@@ -27,20 +27,20 @@ Optional<Vector<TElement>> Parser::parse_color_stop_list(TokenStream<ComponentVa
     };

     auto parse_color_stop_list_element = [&](TElement& element) -> ElementType {
-        tokens.skip_whitespace();
+        tokens.discard_whitespace();
         if (!tokens.has_next_token())
             return ElementType::Garbage;

         RefPtr<CSSStyleValue> color;
         Optional<typename TElement::PositionType> position;
         Optional<typename TElement::PositionType> second_position;
-        if (auto dimension = parse_dimension(tokens.peek_token()); dimension.has_value() && is_position(*dimension)) {
+        if (auto dimension = parse_dimension(tokens.next_token()); dimension.has_value() && is_position(*dimension)) {
             // [<T-percentage> <color>] or [<T-percentage>]
             position = get_position(*dimension);
-            (void)tokens.next_token(); // dimension
-            tokens.skip_whitespace();
+            tokens.discard_a_token(); // dimension
+            tokens.discard_whitespace();
             // <T-percentage>
-            if (!tokens.has_next_token() || tokens.peek_token().is(Token::Type::Comma)) {
+            if (!tokens.has_next_token() || tokens.next_token().is(Token::Type::Comma)) {
                 element.transition_hint = typename TElement::ColorHint { *position };
                 return ElementType::ColorHint;
             }
@@ -55,16 +55,16 @@ Optional<Vector<TElement>> Parser::parse_color_stop_list(TokenStream<ComponentVa
             if (!maybe_color)
                 return ElementType::Garbage;
             color = maybe_color.release_nonnull();
-            tokens.skip_whitespace();
+            tokens.discard_whitespace();
             // Allow up to [<color> <T-percentage> <T-percentage>] (double-position color stops)
             // Note: Double-position color stops only appear to be valid in this order.
             for (auto stop_position : Array { &position, &second_position }) {
-                if (tokens.has_next_token() && !tokens.peek_token().is(Token::Type::Comma)) {
-                    auto dimension = parse_dimension(tokens.next_token());
+                if (tokens.has_next_token() && !tokens.next_token().is(Token::Type::Comma)) {
+                    auto dimension = parse_dimension(tokens.consume_a_token());
                     if (!dimension.has_value() || !is_position(*dimension))
                         return ElementType::Garbage;
                     *stop_position = get_position(*dimension);
-                    tokens.skip_whitespace();
+                    tokens.discard_whitespace();
                 }
             }
         }
@@ -83,14 +83,14 @@ Optional<Vector<TElement>> Parser::parse_color_stop_list(TokenStream<ComponentVa
     Vector<TElement> color_stops { first_element };
     while (tokens.has_next_token()) {
         TElement list_element {};
-        tokens.skip_whitespace();
-        if (!tokens.next_token().is(Token::Type::Comma))
+        tokens.discard_whitespace();
+        if (!tokens.consume_a_token().is(Token::Type::Comma))
             return {};
         auto element_type = parse_color_stop_list_element(list_element);
         if (element_type == ElementType::ColorHint) {
             // <color-hint>, <color-stop>
-            tokens.skip_whitespace();
-            if (!tokens.next_token().is(Token::Type::Comma))
+            tokens.discard_whitespace();
+            if (!tokens.consume_a_token().is(Token::Type::Comma))
                 return {};
             // Note: This fills in the color stop on the same list_element as the color hint (it does not overwrite it).
             if (parse_color_stop_list_element(list_element) != ElementType::ColorStop)
@@ -140,7 +140,7 @@ RefPtr<CSSStyleValue> Parser::parse_linear_gradient_function(TokenStream<Compone
     using GradientType = LinearGradientStyleValue::GradientType;

     auto transaction = outer_tokens.begin_transaction();
-    auto& component_value = outer_tokens.next_token();
+    auto& component_value = outer_tokens.consume_a_token();

     if (!component_value.is_function())
         return nullptr;
@@ -164,7 +164,7 @@ RefPtr<CSSStyleValue> Parser::parse_linear_gradient_function(TokenStream<Compone
     // linear-gradient() = linear-gradient([ <angle> | to <side-or-corner> ]?, <color-stop-list>)

     TokenStream tokens { component_value.function().values() };
-    tokens.skip_whitespace();
+    tokens.discard_whitespace();

     if (!tokens.has_next_token())
         return nullptr;
@@ -194,10 +194,10 @@ RefPtr<CSSStyleValue> Parser::parse_linear_gradient_function(TokenStream<Compone
         return token.token().ident().equals_ignoring_ascii_case("to"sv);
     };

-    auto const& first_param = tokens.peek_token();
+    auto const& first_param = tokens.next_token();
     if (first_param.is(Token::Type::Dimension)) {
         // <angle>
-        tokens.next_token();
+        tokens.discard_a_token();
         auto angle_value = first_param.token().dimension_value();
         auto unit_string = first_param.token().dimension_unit();
         auto angle_type = Angle::unit_from_name(unit_string);
@@ -211,23 +211,23 @@ RefPtr<CSSStyleValue> Parser::parse_linear_gradient_function(TokenStream<Compone

         // Note: -webkit-linear-gradient does not include to the "to" prefix on the side or corner
         if (gradient_type == GradientType::Standard) {
-            tokens.next_token();
-            tokens.skip_whitespace();
+            tokens.discard_a_token();
+            tokens.discard_whitespace();

             if (!tokens.has_next_token())
                 return nullptr;
         }

         // [left | right] || [top | bottom]
-        auto const& first_side = tokens.next_token();
+        auto const& first_side = tokens.consume_a_token();
         if (!first_side.is(Token::Type::Ident))
             return nullptr;

         auto side_a = to_side(first_side.token().ident());
-        tokens.skip_whitespace();
+        tokens.discard_whitespace();
         Optional<SideOrCorner> side_b;
-        if (tokens.has_next_token() && tokens.peek_token().is(Token::Type::Ident))
-            side_b = to_side(tokens.next_token().token().ident());
+        if (tokens.has_next_token() && tokens.next_token().is(Token::Type::Ident))
+            side_b = to_side(tokens.consume_a_token().token().ident());

         if (side_a.has_value() && !side_b.has_value()) {
             gradient_direction = *side_a;
@@ -252,11 +252,11 @@ RefPtr<CSSStyleValue> Parser::parse_linear_gradient_function(TokenStream<Compone
         has_direction_param = false;
     }

-    tokens.skip_whitespace();
+    tokens.discard_whitespace();
     if (!tokens.has_next_token())
         return nullptr;

-    if (has_direction_param && !tokens.next_token().is(Token::Type::Comma))
+    if (has_direction_param && !tokens.consume_a_token().is(Token::Type::Comma))
         return nullptr;

     auto color_stops = parse_linear_color_stop_list(tokens);
@@ -270,7 +270,7 @@ RefPtr<CSSStyleValue> Parser::parse_linear_gradient_function(TokenStream<Compone
 RefPtr<CSSStyleValue> Parser::parse_conic_gradient_function(TokenStream<ComponentValue>& outer_tokens)
 {
     auto transaction = outer_tokens.begin_transaction();
-    auto& component_value = outer_tokens.next_token();
+    auto& component_value = outer_tokens.consume_a_token();

     if (!component_value.is_function())
         return nullptr;
@@ -287,7 +287,7 @@ RefPtr<CSSStyleValue> Parser::parse_conic_gradient_function(TokenStream<Componen
         return nullptr;

     TokenStream tokens { component_value.function().values() };
-    tokens.skip_whitespace();
+    tokens.discard_whitespace();

     if (!tokens.has_next_token())
         return nullptr;
@@ -297,7 +297,7 @@ RefPtr<CSSStyleValue> Parser::parse_conic_gradient_function(TokenStream<Componen

     // conic-gradient( [ [ from <angle> ]? [ at <position> ]? ] ||
     // <color-interpolation-method> , <angular-color-stop-list> )
-    auto token = tokens.peek_token();
+    auto token = tokens.next_token();
     bool got_from_angle = false;
     bool got_color_interpolation_method = false;
     bool got_at_position = false;
@@ -305,8 +305,8 @@ RefPtr<CSSStyleValue> Parser::parse_conic_gradient_function(TokenStream<Componen
     auto consume_identifier = [&](auto identifier) {
         auto token_string = token.token().ident();
         if (token_string.equals_ignoring_ascii_case(identifier)) {
-            (void)tokens.next_token();
-            tokens.skip_whitespace();
+            tokens.discard_a_token();
+            tokens.discard_whitespace();
             return true;
         }
         return false;
@@ -319,7 +319,7 @@ RefPtr<CSSStyleValue> Parser::parse_conic_gradient_function(TokenStream<Componen
             if (!tokens.has_next_token())
                 return nullptr;

-            auto angle_token = tokens.next_token();
+            auto angle_token = tokens.consume_a_token();
             if (!angle_token.is(Token::Type::Dimension))
                 return nullptr;
             auto angle = angle_token.token().dimension_value();
@@ -348,16 +348,16 @@ RefPtr<CSSStyleValue> Parser::parse_conic_gradient_function(TokenStream<Componen
         } else {
             break;
         }
-        tokens.skip_whitespace();
+        tokens.discard_whitespace();
         if (!tokens.has_next_token())
             return nullptr;
-        token = tokens.peek_token();
+        token = tokens.next_token();
     }

-    tokens.skip_whitespace();
+    tokens.discard_whitespace();
     if (!tokens.has_next_token())
         return nullptr;
-    if ((got_from_angle || got_at_position || got_color_interpolation_method) && !tokens.next_token().is(Token::Type::Comma))
+    if ((got_from_angle || got_at_position || got_color_interpolation_method) && !tokens.consume_a_token().is(Token::Type::Comma))
         return nullptr;

     auto color_stops = parse_angular_color_stop_list(tokens);
@@ -380,7 +380,7 @@ RefPtr<CSSStyleValue> Parser::parse_radial_gradient_function(TokenStream<Compone
     using Size = RadialGradientStyleValue::Size;

     auto transaction = outer_tokens.begin_transaction();
-    auto& component_value = outer_tokens.next_token();
+    auto& component_value = outer_tokens.consume_a_token();

     if (!component_value.is_function())
         return nullptr;
@@ -397,7 +397,7 @@ RefPtr<CSSStyleValue> Parser::parse_radial_gradient_function(TokenStream<Compone
         return nullptr;

     TokenStream tokens { component_value.function().values() };
-    tokens.skip_whitespace();
+    tokens.discard_whitespace();
     if (!tokens.has_next_token())
         return nullptr;

@@ -416,8 +416,8 @@ RefPtr<CSSStyleValue> Parser::parse_radial_gradient_function(TokenStream<Compone

     auto parse_ending_shape = [&]() -> Optional<EndingShape> {
         auto transaction = tokens.begin_transaction();
-        tokens.skip_whitespace();
-        auto& token = tokens.next_token();
+        tokens.discard_whitespace();
+        auto& token = tokens.consume_a_token();
         if (!token.is(Token::Type::Ident))
             return {};
         auto ident = token.token().ident();
@@ -446,11 +446,11 @@ RefPtr<CSSStyleValue> Parser::parse_radial_gradient_function(TokenStream<Compone
         // <length [0,∞]> |
         // <length-percentage [0,∞]>{2}
         auto transaction_size = tokens.begin_transaction();
-        tokens.skip_whitespace();
+        tokens.discard_whitespace();
         if (!tokens.has_next_token())
             return {};
-        if (tokens.peek_token().is(Token::Type::Ident)) {
-            auto extent = parse_extent_keyword(tokens.next_token().token().ident());
+        if (tokens.next_token().is(Token::Type::Ident)) {
+            auto extent = parse_extent_keyword(tokens.consume_a_token().token().ident());
             if (!extent.has_value())
                 return {};
             return commit_value(*extent, transaction_size);
@@ -459,7 +459,7 @@ RefPtr<CSSStyleValue> Parser::parse_radial_gradient_function(TokenStream<Compone
         if (!first_radius.has_value())
             return {};
         auto transaction_second_dimension = tokens.begin_transaction();
-        tokens.skip_whitespace();
+        tokens.discard_whitespace();
         if (tokens.has_next_token()) {
             auto second_radius = parse_length_percentage(tokens);
             if (second_radius.has_value())
@@ -494,13 +494,13 @@ RefPtr<CSSStyleValue> Parser::parse_radial_gradient_function(TokenStream<Compone
         }
     }

-    tokens.skip_whitespace();
+    tokens.discard_whitespace();
     if (!tokens.has_next_token())
         return nullptr;

-    auto& token = tokens.peek_token();
+    auto& token = tokens.next_token();
     if (token.is_ident("at"sv)) {
-        (void)tokens.next_token();
+        tokens.discard_a_token();
         auto position = parse_position_value(tokens);
         if (!position)
             return nullptr;
@@ -508,10 +508,10 @@ RefPtr<CSSStyleValue> Parser::parse_radial_gradient_function(TokenStream<Compone
         expect_comma = true;
     }

-    tokens.skip_whitespace();
+    tokens.discard_whitespace();
     if (!tokens.has_next_token())
         return nullptr;
-    if (expect_comma && !tokens.next_token().is(Token::Type::Comma))
+    if (expect_comma && !tokens.consume_a_token().is(Token::Type::Comma))
         return nullptr;

     // <color-stop-list>
@@ -29,7 +29,7 @@ Vector<NonnullRefPtr<MediaQuery>> Parser::parse_a_media_query_list(TokenStream<T

         // AD-HOC: Ignore whitespace-only queries
         // to make `@media {..}` equivalent to `@media all {..}`
-        tokens.skip_whitespace();
+        tokens.discard_whitespace();
         if (!tokens.has_next_token())
             return {};

@@ -64,8 +64,8 @@ NonnullRefPtr<MediaQuery> Parser::parse_media_query(TokenStream<ComponentValue>&
     // `[ not | only ]?`, Returns whether to negate the query
     auto parse_initial_modifier = [](auto& tokens) -> Optional<bool> {
         auto transaction = tokens.begin_transaction();
-        tokens.skip_whitespace();
-        auto& token = tokens.next_token();
+        tokens.discard_whitespace();
+        auto& token = tokens.consume_a_token();
         if (!token.is(Token::Type::Ident))
             return {};

@@ -92,11 +92,11 @@ NonnullRefPtr<MediaQuery> Parser::parse_media_query(TokenStream<ComponentValue>&
     };

     auto media_query = MediaQuery::create();
-    tokens.skip_whitespace();
+    tokens.discard_whitespace();

     // `<media-condition>`
     if (auto media_condition = parse_media_condition(tokens, MediaCondition::AllowOr::Yes)) {
-        tokens.skip_whitespace();
+        tokens.discard_whitespace();
         if (tokens.has_next_token())
             return invalid_media_query();
         media_query->m_media_condition = move(media_condition);
@@ -106,13 +106,13 @@ NonnullRefPtr<MediaQuery> Parser::parse_media_query(TokenStream<ComponentValue>&
     // `[ not | only ]?`
     if (auto modifier = parse_initial_modifier(tokens); modifier.has_value()) {
         media_query->m_negated = modifier.value();
-        tokens.skip_whitespace();
+        tokens.discard_whitespace();
     }

     // `<media-type>`
     if (auto media_type = parse_media_type(tokens); media_type.has_value()) {
         media_query->m_media_type = media_type.value();
-        tokens.skip_whitespace();
+        tokens.discard_whitespace();
     } else {
         return invalid_media_query();
     }
@@ -121,9 +121,9 @@ NonnullRefPtr<MediaQuery> Parser::parse_media_query(TokenStream<ComponentValue>&
         return media_query;

     // `[ and <media-condition-without-or> ]?`
-    if (auto maybe_and = tokens.next_token(); maybe_and.is_ident("and"sv)) {
+    if (auto maybe_and = tokens.consume_a_token(); maybe_and.is_ident("and"sv)) {
         if (auto media_condition = parse_media_condition(tokens, MediaCondition::AllowOr::No)) {
-            tokens.skip_whitespace();
+            tokens.discard_whitespace();
             if (tokens.has_next_token())
                 return invalid_media_query();
             media_query->m_media_condition = move(media_condition);
@@ -142,14 +142,14 @@ OwnPtr<MediaCondition> Parser::parse_media_condition(TokenStream<ComponentValue>
 {
     // `<media-not> | <media-in-parens> [ <media-and>* | <media-or>* ]`
     auto transaction = tokens.begin_transaction();
-    tokens.skip_whitespace();
+    tokens.discard_whitespace();

     // `<media-not> = not <media-in-parens>`
     auto parse_media_not = [&](auto& tokens) -> OwnPtr<MediaCondition> {
         auto local_transaction = tokens.begin_transaction();
-        tokens.skip_whitespace();
+        tokens.discard_whitespace();

-        auto& first_token = tokens.next_token();
+        auto& first_token = tokens.consume_a_token();
         if (first_token.is_ident("not"sv)) {
             if (auto child_condition = parse_media_condition(tokens, MediaCondition::AllowOr::Yes)) {
                 local_transaction.commit();
@@ -162,11 +162,11 @@ OwnPtr<MediaCondition> Parser::parse_media_condition(TokenStream<ComponentValue>

     auto parse_media_with_combinator = [&](auto& tokens, StringView combinator) -> OwnPtr<MediaCondition> {
         auto local_transaction = tokens.begin_transaction();
-        tokens.skip_whitespace();
+        tokens.discard_whitespace();

-        auto& first = tokens.next_token();
+        auto& first = tokens.consume_a_token();
         if (first.is_ident(combinator)) {
-            tokens.skip_whitespace();
+            tokens.discard_whitespace();
             if (auto media_in_parens = parse_media_in_parens(tokens)) {
                 local_transaction.commit();
                 return media_in_parens;
@@ -189,7 +189,7 @@ OwnPtr<MediaCondition> Parser::parse_media_condition(TokenStream<ComponentValue>

     // `<media-in-parens> [ <media-and>* | <media-or>* ]`
     if (auto maybe_media_in_parens = parse_media_in_parens(tokens)) {
-        tokens.skip_whitespace();
+        tokens.discard_whitespace();
         // Only `<media-in-parens>`
         if (!tokens.has_next_token()) {
             transaction.commit();
@@ -203,11 +203,11 @@ OwnPtr<MediaCondition> Parser::parse_media_condition(TokenStream<ComponentValue>
         if (auto media_and = parse_media_and(tokens)) {
             child_conditions.append(media_and.release_nonnull());

-            tokens.skip_whitespace();
+            tokens.discard_whitespace();
             while (tokens.has_next_token()) {
                 if (auto next_media_and = parse_media_and(tokens)) {
                     child_conditions.append(next_media_and.release_nonnull());
-                    tokens.skip_whitespace();
+                    tokens.discard_whitespace();
                     continue;
                 }
                 // We failed - invalid syntax!
@@ -223,11 +223,11 @@ OwnPtr<MediaCondition> Parser::parse_media_condition(TokenStream<ComponentValue>
         if (auto media_or = parse_media_or(tokens)) {
             child_conditions.append(media_or.release_nonnull());

-            tokens.skip_whitespace();
+            tokens.discard_whitespace();
             while (tokens.has_next_token()) {
                 if (auto next_media_or = parse_media_or(tokens)) {
                     child_conditions.append(next_media_or.release_nonnull());
-                    tokens.skip_whitespace();
+                    tokens.discard_whitespace();
                     continue;
                 }
                 // We failed - invalid syntax!
@@ -247,7 +247,7 @@ OwnPtr<MediaCondition> Parser::parse_media_condition(TokenStream<ComponentValue>
 Optional<MediaFeature> Parser::parse_media_feature(TokenStream<ComponentValue>& tokens)
 {
     // `[ <mf-plain> | <mf-boolean> | <mf-range> ]`
-    tokens.skip_whitespace();
+    tokens.discard_whitespace();

     // `<mf-name> = <ident>`
     struct MediaFeatureName {
@@ -260,7 +260,7 @@ Optional<MediaFeature> Parser::parse_media_feature(TokenStream<ComponentValue>&
     };
     auto parse_mf_name = [](auto& tokens, bool allow_min_max_prefix) -> Optional<MediaFeatureName> {
         auto transaction = tokens.begin_transaction();
-        auto& token = tokens.next_token();
+        auto& token = tokens.consume_a_token();
         if (token.is(Token::Type::Ident)) {
             auto name = token.token().ident();
             if (auto id = media_feature_id_from_string(name); id.has_value()) {
@@ -285,10 +285,10 @@ Optional<MediaFeature> Parser::parse_media_feature(TokenStream<ComponentValue>&
     // `<mf-boolean> = <mf-name>`
     auto parse_mf_boolean = [&](auto& tokens) -> Optional<MediaFeature> {
         auto transaction = tokens.begin_transaction();
-        tokens.skip_whitespace();
+        tokens.discard_whitespace();

         if (auto maybe_name = parse_mf_name(tokens, false); maybe_name.has_value()) {
-            tokens.skip_whitespace();
+            tokens.discard_whitespace();
             if (!tokens.has_next_token()) {
                 transaction.commit();
                 return MediaFeature::boolean(maybe_name->id);
@@ -301,14 +301,14 @@ Optional<MediaFeature> Parser::parse_media_feature(TokenStream<ComponentValue>&
     // `<mf-plain> = <mf-name> : <mf-value>`
     auto parse_mf_plain = [&](auto& tokens) -> Optional<MediaFeature> {
         auto transaction = tokens.begin_transaction();
-        tokens.skip_whitespace();
+        tokens.discard_whitespace();

         if (auto maybe_name = parse_mf_name(tokens, true); maybe_name.has_value()) {
-            tokens.skip_whitespace();
-            if (tokens.next_token().is(Token::Type::Colon)) {
-                tokens.skip_whitespace();
+            tokens.discard_whitespace();
+            if (tokens.consume_a_token().is(Token::Type::Colon)) {
+                tokens.discard_whitespace();
                 if (auto maybe_value = parse_media_feature_value(maybe_name->id, tokens); maybe_value.has_value()) {
-                    tokens.skip_whitespace();
+                    tokens.discard_whitespace();
                     if (!tokens.has_next_token()) {
                         transaction.commit();
                         switch (maybe_name->type) {
@@ -333,9 +333,9 @@ Optional<MediaFeature> Parser::parse_media_feature(TokenStream<ComponentValue>&
     // <mf-comparison> = <mf-lt> | <mf-gt> | <mf-eq>`
     auto parse_comparison = [](auto& tokens) -> Optional<MediaFeature::Comparison> {
         auto transaction = tokens.begin_transaction();
-        tokens.skip_whitespace();
+        tokens.discard_whitespace();

-        auto& first = tokens.next_token();
+        auto& first = tokens.consume_a_token();
         if (first.is(Token::Type::Delim)) {
             auto first_delim = first.token().delim();
             if (first_delim == '=') {
@@ -343,9 +343,9 @@ Optional<MediaFeature> Parser::parse_media_feature(TokenStream<ComponentValue>&
                 return MediaFeature::Comparison::Equal;
             }
             if (first_delim == '<') {
-                auto& second = tokens.peek_token();
+                auto& second = tokens.next_token();
                 if (second.is_delim('=')) {
-                    tokens.next_token();
+                    tokens.discard_a_token();
                     transaction.commit();
                     return MediaFeature::Comparison::LessThanOrEqual;
                 }
@@ -353,9 +353,9 @@ Optional<MediaFeature> Parser::parse_media_feature(TokenStream<ComponentValue>&
                 return MediaFeature::Comparison::LessThan;
             }
             if (first_delim == '>') {
-                auto& second = tokens.peek_token();
+                auto& second = tokens.next_token();
                 if (second.is_delim('=')) {
-                    tokens.next_token();
+                    tokens.discard_a_token();
                     transaction.commit();
                     return MediaFeature::Comparison::GreaterThanOrEqual;
                 }
@@ -403,16 +403,16 @@ Optional<MediaFeature> Parser::parse_media_feature(TokenStream<ComponentValue>&
     // | <mf-value> <mf-gt> <mf-name> <mf-gt> <mf-value>`
     auto parse_mf_range = [&](auto& tokens) -> Optional<MediaFeature> {
         auto transaction = tokens.begin_transaction();
-        tokens.skip_whitespace();
+        tokens.discard_whitespace();

         // `<mf-name> <mf-comparison> <mf-value>`
         // NOTE: We have to check for <mf-name> first, since all <mf-name>s will also parse as <mf-value>.
         if (auto maybe_name = parse_mf_name(tokens, false); maybe_name.has_value() && media_feature_type_is_range(maybe_name->id)) {
-            tokens.skip_whitespace();
+            tokens.discard_whitespace();
             if (auto maybe_comparison = parse_comparison(tokens); maybe_comparison.has_value()) {
-                tokens.skip_whitespace();
+                tokens.discard_whitespace();
                 if (auto maybe_value = parse_media_feature_value(maybe_name->id, tokens); maybe_value.has_value()) {
-                    tokens.skip_whitespace();
+                    tokens.discard_whitespace();
                     if (!tokens.has_next_token() && !maybe_value->is_ident()) {
                         transaction.commit();
                         return MediaFeature::half_range(maybe_value.release_value(), flip(maybe_comparison.release_value()), maybe_name->id);
@@ -435,23 +435,23 @@ Optional<MediaFeature> Parser::parse_media_feature(TokenStream<ComponentValue>&
             while (tokens.has_next_token() && !maybe_name.has_value()) {
                 if (auto maybe_comparison = parse_comparison(tokens); maybe_comparison.has_value()) {
                     // We found a comparison, so the next non-whitespace token should be the <mf-name>
-                    tokens.skip_whitespace();
+                    tokens.discard_whitespace();
                     maybe_name = parse_mf_name(tokens, false);
                     break;
                 }
-                tokens.next_token();
-                tokens.skip_whitespace();
+                tokens.discard_a_token();
+                tokens.discard_whitespace();
             }
         }

         // Now, we can parse the range properly.
         if (maybe_name.has_value() && media_feature_type_is_range(maybe_name->id)) {
             if (auto maybe_left_value = parse_media_feature_value(maybe_name->id, tokens); maybe_left_value.has_value()) {
-                tokens.skip_whitespace();
+                tokens.discard_whitespace();
                 if (auto maybe_left_comparison = parse_comparison(tokens); maybe_left_comparison.has_value()) {
-                    tokens.skip_whitespace();
-                    tokens.next_token(); // The <mf-name> which we already parsed above.
-                    tokens.skip_whitespace();
+                    tokens.discard_whitespace();
+                    tokens.discard_a_token(); // The <mf-name> which we already parsed above.
+                    tokens.discard_whitespace();

                     if (!tokens.has_next_token()) {
                         transaction.commit();
@@ -459,9 +459,9 @@ Optional<MediaFeature> Parser::parse_media_feature(TokenStream<ComponentValue>&
                     }

                     if (auto maybe_right_comparison = parse_comparison(tokens); maybe_right_comparison.has_value()) {
-                        tokens.skip_whitespace();
+                        tokens.discard_whitespace();
                         if (auto maybe_right_value = parse_media_feature_value(maybe_name->id, tokens); maybe_right_value.has_value()) {
-                            tokens.skip_whitespace();
+                            tokens.discard_whitespace();
                             // For this to be valid, the following must be true:
                             // - Comparisons must either both be >/>= or both be </<=.
                             // - Neither comparison can be `=`.
@@ -500,8 +500,8 @@ Optional<MediaFeature> Parser::parse_media_feature(TokenStream<ComponentValue>&
 Optional<MediaQuery::MediaType> Parser::parse_media_type(TokenStream<ComponentValue>& tokens)
 {
     auto transaction = tokens.begin_transaction();
-    tokens.skip_whitespace();
-    auto const& token = tokens.next_token();
+    tokens.discard_whitespace();
+    auto const& token = tokens.consume_a_token();

     if (!token.is(Token::Type::Ident))
         return {};
@@ -517,19 +517,19 @@ OwnPtr<MediaCondition> Parser::parse_media_in_parens(TokenStream<ComponentValue>
 {
     // `<media-in-parens> = ( <media-condition> ) | ( <media-feature> ) | <general-enclosed>`
     auto transaction = tokens.begin_transaction();
-    tokens.skip_whitespace();
+    tokens.discard_whitespace();

     // `( <media-condition> ) | ( <media-feature> )`
-    auto const& first_token = tokens.peek_token();
+    auto const& first_token = tokens.next_token();
     if (first_token.is_block() && first_token.block().is_paren()) {
         TokenStream inner_token_stream { first_token.block().values() };
         if (auto maybe_media_condition = parse_media_condition(inner_token_stream, MediaCondition::AllowOr::Yes)) {
-            tokens.next_token();
+            tokens.discard_a_token();
             transaction.commit();
             return maybe_media_condition.release_nonnull();
         }
         if (auto maybe_media_feature = parse_media_feature(inner_token_stream); maybe_media_feature.has_value()) {
-            tokens.next_token();
+            tokens.discard_a_token();
             transaction.commit();
             return MediaCondition::from_feature(maybe_media_feature.release_value());
         }
@@ -553,10 +553,10 @@ Optional<MediaFeatureValue> Parser::parse_media_feature_value(MediaFeatureID med
     // NOTE: Calculations are not allowed for media feature values, at least in the current spec, so we reject them.

     // Identifiers
-    if (tokens.peek_token().is(Token::Type::Ident)) {
+    if (tokens.next_token().is(Token::Type::Ident)) {
         auto transaction = tokens.begin_transaction();
-        tokens.skip_whitespace();
-        auto keyword = keyword_from_string(tokens.next_token().token().ident());
+        tokens.discard_whitespace();
+        auto keyword = keyword_from_string(tokens.consume_a_token().token().ident());
         if (keyword.has_value() && media_feature_accepts_keyword(media_feature, keyword.value())) {
             transaction.commit();
             return MediaFeatureValue(keyword.value());
@@ -568,7 +568,7 @@ Optional<MediaFeatureValue> Parser::parse_media_feature_value(MediaFeatureID med
     // Boolean (<mq-boolean> in the spec: a 1 or 0)
     if (media_feature_accepts_type(media_feature, MediaFeatureValueType::Boolean)) {
         auto transaction = tokens.begin_transaction();
-        tokens.skip_whitespace();
+        tokens.discard_whitespace();
         if (auto integer = parse_integer(tokens); integer.has_value() && !integer->is_calculated()) {
             auto integer_value = integer->value();
             if (integer_value == 0 || integer_value == 1) {
@@ -590,7 +590,7 @@ Optional<MediaFeatureValue> Parser::parse_media_feature_value(MediaFeatureID med
     // Length
     if (media_feature_accepts_type(media_feature, MediaFeatureValueType::Length)) {
         auto transaction = tokens.begin_transaction();
-        tokens.skip_whitespace();
+        tokens.discard_whitespace();
         if (auto length = parse_length(tokens); length.has_value() && !length->is_calculated()) {
             transaction.commit();
             return MediaFeatureValue(length->value());
@@ -600,7 +600,7 @@ Optional<MediaFeatureValue> Parser::parse_media_feature_value(MediaFeatureID med
     // Ratio
     if (media_feature_accepts_type(media_feature, MediaFeatureValueType::Ratio)) {
         auto transaction = tokens.begin_transaction();
-        tokens.skip_whitespace();
+        tokens.discard_whitespace();
         if (auto ratio = parse_ratio(tokens); ratio.has_value()) {
             transaction.commit();
             return MediaFeatureValue(ratio.release_value());
@@ -610,7 +610,7 @@ Optional<MediaFeatureValue> Parser::parse_media_feature_value(MediaFeatureID med
     // Resolution
     if (media_feature_accepts_type(media_feature, MediaFeatureValueType::Resolution)) {
         auto transaction = tokens.begin_transaction();
-        tokens.skip_whitespace();
+        tokens.discard_whitespace();
         if (auto resolution = parse_resolution(tokens); resolution.has_value() && !resolution->is_calculated()) {
             transaction.commit();
             return MediaFeatureValue(resolution->value());
File diff suppressed because it is too large
@@ -115,11 +115,11 @@ Parser::ParseErrorOr<NonnullRefPtr<Selector>> Parser::parse_complex_selector(Tok

 Parser::ParseErrorOr<Optional<Selector::CompoundSelector>> Parser::parse_compound_selector(TokenStream<ComponentValue>& tokens)
 {
-    tokens.skip_whitespace();
+    tokens.discard_whitespace();

     auto combinator = parse_selector_combinator(tokens).value_or(Selector::Combinator::Descendant);

-    tokens.skip_whitespace();
+    tokens.discard_whitespace();

     Vector<Selector::SimpleSelector> simple_selectors;

@@ -138,7 +138,7 @@ Parser::ParseErrorOr<Optional<Selector::CompoundSelector>> Parser::parse_compoun

 Optional<Selector::Combinator> Parser::parse_selector_combinator(TokenStream<ComponentValue>& tokens)
 {
-    auto const& current_value = tokens.next_token();
+    auto const& current_value = tokens.consume_a_token();
     if (current_value.is(Token::Type::Delim)) {
         switch (current_value.token().delim()) {
         case '>':
@@ -148,12 +148,12 @@ Optional<Selector::Combinator> Parser::parse_selector_combinator(TokenStream<Com
         case '~':
             return Selector::Combinator::SubsequentSibling;
         case '|': {
-            auto const& next = tokens.peek_token();
+            auto const& next = tokens.next_token();
             if (next.is(Token::Type::EndOfFile))
                 return {};

             if (next.is_delim('|')) {
-                tokens.next_token();
+                tokens.discard_a_token();
                 return Selector::Combinator::Column;
             }
         }
@@ -184,11 +184,11 @@ Optional<Selector::SimpleSelector::QualifiedName> Parser::parse_selector_qualifi

     auto transaction = tokens.begin_transaction();

-    auto first_token = tokens.next_token();
+    auto first_token = tokens.consume_a_token();
     if (first_token.is_delim('|')) {
         // Case 1: `|<name>`
-        if (is_name(tokens.peek_token())) {
-            auto name_token = tokens.next_token();
+        if (is_name(tokens.next_token())) {
+            auto name_token = tokens.consume_a_token();

             if (allow_wildcard_name == AllowWildcardName::No && name_token.is_delim('*'))
                 return {};
@@ -205,11 +205,11 @@ Optional<Selector::SimpleSelector::QualifiedName> Parser::parse_selector_qualifi
     if (!is_name(first_token))
         return {};

-    if (tokens.peek_token().is_delim('|') && is_name(tokens.peek_token(1))) {
+    if (tokens.next_token().is_delim('|') && is_name(tokens.peek_token(1))) {
         // Case 2: `<namespace>|<name>`
-        (void)tokens.next_token(); // `|`
+        tokens.discard_a_token(); // `|`
         auto namespace_ = get_name(first_token);
-        auto name = get_name(tokens.next_token());
+        auto name = get_name(tokens.consume_a_token());

         if (allow_wildcard_name == AllowWildcardName::No && name == "*"sv)
             return {};
@@ -242,7 +242,7 @@ Parser::ParseErrorOr<Selector::SimpleSelector> Parser::parse_attribute_simple_se
 {
     auto attribute_tokens = TokenStream { first_value.block().values() };

-    attribute_tokens.skip_whitespace();
+    attribute_tokens.discard_whitespace();

     if (!attribute_tokens.has_next_token()) {
         dbgln_if(CSS_PARSER_DEBUG, "CSS attribute selector is empty!");
@@ -251,7 +251,7 @@ Parser::ParseErrorOr<Selector::SimpleSelector> Parser::parse_attribute_simple_se

     auto maybe_qualified_name = parse_selector_qualified_name(attribute_tokens, AllowWildcardName::No);
     if (!maybe_qualified_name.has_value()) {
-        dbgln_if(CSS_PARSER_DEBUG, "Expected qualified-name for attribute name, got: '{}'", attribute_tokens.peek_token().to_debug_string());
+        dbgln_if(CSS_PARSER_DEBUG, "Expected qualified-name for attribute name, got: '{}'", attribute_tokens.next_token().to_debug_string());
         return ParseError::SyntaxError;
     }
     auto qualified_name = maybe_qualified_name.release_value();
@@ -265,11 +265,11 @@ Parser::ParseErrorOr<Selector::SimpleSelector> Parser::parse_attribute_simple_se
         }
     };

-    attribute_tokens.skip_whitespace();
+    attribute_tokens.discard_whitespace();
     if (!attribute_tokens.has_next_token())
         return simple_selector;

-    auto const& delim_part = attribute_tokens.next_token();
+    auto const& delim_part = attribute_tokens.consume_a_token();
     if (!delim_part.is(Token::Type::Delim)) {
         dbgln_if(CSS_PARSER_DEBUG, "Expected a delim for attribute comparison, got: '{}'", delim_part.to_debug_string());
         return ParseError::SyntaxError;
@@ -283,7 +283,7 @@ Parser::ParseErrorOr<Selector::SimpleSelector> Parser::parse_attribute_simple_se
         return ParseError::SyntaxError;
     }

-    auto const& delim_second_part = attribute_tokens.next_token();
+    auto const& delim_second_part = attribute_tokens.consume_a_token();
     if (!delim_second_part.is_delim('=')) {
         dbgln_if(CSS_PARSER_DEBUG, "Expected a double delim for attribute comparison, got: '{}{}'", delim_part.to_debug_string(), delim_second_part.to_debug_string());
         return ParseError::SyntaxError;
@@ -309,13 +309,13 @@ Parser::ParseErrorOr<Selector::SimpleSelector> Parser::parse_attribute_simple_se
@ -309,13 +309,13 @@ Parser::ParseErrorOr<Selector::SimpleSelector> Parser::parse_attribute_simple_se
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
attribute_tokens.skip_whitespace();
|
attribute_tokens.discard_whitespace();
|
||||||
if (!attribute_tokens.has_next_token()) {
|
if (!attribute_tokens.has_next_token()) {
|
||||||
dbgln_if(CSS_PARSER_DEBUG, "Attribute selector ended without a value to match.");
|
dbgln_if(CSS_PARSER_DEBUG, "Attribute selector ended without a value to match.");
|
||||||
return ParseError::SyntaxError;
|
return ParseError::SyntaxError;
|
||||||
}
|
}
|
||||||
|
|
||||||
auto const& value_part = attribute_tokens.next_token();
|
auto const& value_part = attribute_tokens.consume_a_token();
|
||||||
if (!value_part.is(Token::Type::Ident) && !value_part.is(Token::Type::String)) {
|
if (!value_part.is(Token::Type::Ident) && !value_part.is(Token::Type::String)) {
|
||||||
dbgln_if(CSS_PARSER_DEBUG, "Expected a string or ident for the value to match attribute against, got: '{}'", value_part.to_debug_string());
|
dbgln_if(CSS_PARSER_DEBUG, "Expected a string or ident for the value to match attribute against, got: '{}'", value_part.to_debug_string());
|
||||||
return ParseError::SyntaxError;
|
return ParseError::SyntaxError;
|
||||||
|
@ -323,10 +323,10 @@ Parser::ParseErrorOr<Selector::SimpleSelector> Parser::parse_attribute_simple_se
|
||||||
auto const& value_string = value_part.token().is(Token::Type::Ident) ? value_part.token().ident() : value_part.token().string();
|
auto const& value_string = value_part.token().is(Token::Type::Ident) ? value_part.token().ident() : value_part.token().string();
|
||||||
simple_selector.attribute().value = value_string.to_string();
|
simple_selector.attribute().value = value_string.to_string();
|
||||||
|
|
||||||
attribute_tokens.skip_whitespace();
|
attribute_tokens.discard_whitespace();
|
||||||
// Handle case-sensitivity suffixes. https://www.w3.org/TR/selectors-4/#attribute-case
|
// Handle case-sensitivity suffixes. https://www.w3.org/TR/selectors-4/#attribute-case
|
||||||
if (attribute_tokens.has_next_token()) {
|
if (attribute_tokens.has_next_token()) {
|
||||||
auto const& case_sensitivity_part = attribute_tokens.next_token();
|
auto const& case_sensitivity_part = attribute_tokens.consume_a_token();
|
||||||
if (case_sensitivity_part.is(Token::Type::Ident)) {
|
if (case_sensitivity_part.is(Token::Type::Ident)) {
|
||||||
auto case_sensitivity = case_sensitivity_part.token().ident();
|
auto case_sensitivity = case_sensitivity_part.token().ident();
|
||||||
if (case_sensitivity.equals_ignoring_ascii_case("i"sv)) {
|
if (case_sensitivity.equals_ignoring_ascii_case("i"sv)) {
|
||||||
|
@ -354,7 +354,7 @@ Parser::ParseErrorOr<Selector::SimpleSelector> Parser::parse_attribute_simple_se
|
||||||
Parser::ParseErrorOr<Selector::SimpleSelector> Parser::parse_pseudo_simple_selector(TokenStream<ComponentValue>& tokens)
|
Parser::ParseErrorOr<Selector::SimpleSelector> Parser::parse_pseudo_simple_selector(TokenStream<ComponentValue>& tokens)
|
||||||
{
|
{
|
||||||
auto peek_token_ends_selector = [&]() -> bool {
|
auto peek_token_ends_selector = [&]() -> bool {
|
||||||
auto const& value = tokens.peek_token();
|
auto const& value = tokens.next_token();
|
||||||
return (value.is(Token::Type::EndOfFile) || value.is(Token::Type::Whitespace) || value.is(Token::Type::Comma));
|
return (value.is(Token::Type::EndOfFile) || value.is(Token::Type::Whitespace) || value.is(Token::Type::Comma));
|
||||||
};
|
};
|
||||||
|
|
||||||
|
@ -362,15 +362,15 @@ Parser::ParseErrorOr<Selector::SimpleSelector> Parser::parse_pseudo_simple_selec
|
||||||
return ParseError::SyntaxError;
|
return ParseError::SyntaxError;
|
||||||
|
|
||||||
bool is_pseudo = false;
|
bool is_pseudo = false;
|
||||||
if (tokens.peek_token().is(Token::Type::Colon)) {
|
if (tokens.next_token().is(Token::Type::Colon)) {
|
||||||
is_pseudo = true;
|
is_pseudo = true;
|
||||||
tokens.next_token();
|
tokens.discard_a_token();
|
||||||
if (peek_token_ends_selector())
|
if (peek_token_ends_selector())
|
||||||
return ParseError::SyntaxError;
|
return ParseError::SyntaxError;
|
||||||
}
|
}
|
||||||
|
|
||||||
if (is_pseudo) {
|
if (is_pseudo) {
|
||||||
auto const& name_token = tokens.next_token();
|
auto const& name_token = tokens.consume_a_token();
|
||||||
if (!name_token.is(Token::Type::Ident)) {
|
if (!name_token.is(Token::Type::Ident)) {
|
||||||
dbgln_if(CSS_PARSER_DEBUG, "Expected an ident for pseudo-element, got: '{}'", name_token.to_debug_string());
|
dbgln_if(CSS_PARSER_DEBUG, "Expected an ident for pseudo-element, got: '{}'", name_token.to_debug_string());
|
||||||
return ParseError::SyntaxError;
|
return ParseError::SyntaxError;
|
||||||
|
@ -409,7 +409,7 @@ Parser::ParseErrorOr<Selector::SimpleSelector> Parser::parse_pseudo_simple_selec
|
||||||
if (peek_token_ends_selector())
|
if (peek_token_ends_selector())
|
||||||
return ParseError::SyntaxError;
|
return ParseError::SyntaxError;
|
||||||
|
|
||||||
auto const& pseudo_class_token = tokens.next_token();
|
auto const& pseudo_class_token = tokens.consume_a_token();
|
||||||
|
|
||||||
if (pseudo_class_token.is(Token::Type::Ident)) {
|
if (pseudo_class_token.is(Token::Type::Ident)) {
|
||||||
auto pseudo_name = pseudo_class_token.token().ident();
|
auto pseudo_name = pseudo_class_token.token().ident();
|
||||||
|
@ -461,7 +461,7 @@ Parser::ParseErrorOr<Selector::SimpleSelector> Parser::parse_pseudo_simple_selec
|
||||||
return ParseError::SyntaxError;
|
return ParseError::SyntaxError;
|
||||||
}
|
}
|
||||||
|
|
||||||
tokens.skip_whitespace();
|
tokens.discard_whitespace();
|
||||||
if (!tokens.has_next_token()) {
|
if (!tokens.has_next_token()) {
|
||||||
return Selector::SimpleSelector {
|
return Selector::SimpleSelector {
|
||||||
.type = Selector::SimpleSelector::Type::PseudoClass,
|
.type = Selector::SimpleSelector::Type::PseudoClass,
|
||||||
|
@ -475,14 +475,14 @@ Parser::ParseErrorOr<Selector::SimpleSelector> Parser::parse_pseudo_simple_selec
|
||||||
return ParseError::SyntaxError;
|
return ParseError::SyntaxError;
|
||||||
|
|
||||||
// Parse the `of <selector-list>` syntax
|
// Parse the `of <selector-list>` syntax
|
||||||
auto const& maybe_of = tokens.next_token();
|
auto const& maybe_of = tokens.consume_a_token();
|
||||||
if (!maybe_of.is_ident("of"sv))
|
if (!maybe_of.is_ident("of"sv))
|
||||||
return ParseError::SyntaxError;
|
return ParseError::SyntaxError;
|
||||||
|
|
||||||
tokens.skip_whitespace();
|
tokens.discard_whitespace();
|
||||||
auto selector_list = TRY(parse_a_selector_list(tokens, SelectorType::Standalone));
|
auto selector_list = TRY(parse_a_selector_list(tokens, SelectorType::Standalone));
|
||||||
|
|
||||||
tokens.skip_whitespace();
|
tokens.discard_whitespace();
|
||||||
if (tokens.has_next_token())
|
if (tokens.has_next_token())
|
||||||
return ParseError::SyntaxError;
|
return ParseError::SyntaxError;
|
||||||
|
|
||||||
|
@ -558,9 +558,9 @@ Parser::ParseErrorOr<Selector::SimpleSelector> Parser::parse_pseudo_simple_selec
|
||||||
}
|
}
|
||||||
case PseudoClassMetadata::ParameterType::Ident: {
|
case PseudoClassMetadata::ParameterType::Ident: {
|
||||||
auto function_token_stream = TokenStream(pseudo_function.values());
|
auto function_token_stream = TokenStream(pseudo_function.values());
|
||||||
function_token_stream.skip_whitespace();
|
function_token_stream.discard_whitespace();
|
||||||
auto maybe_keyword_token = function_token_stream.next_token();
|
auto maybe_keyword_token = function_token_stream.consume_a_token();
|
||||||
function_token_stream.skip_whitespace();
|
function_token_stream.discard_whitespace();
|
||||||
if (!maybe_keyword_token.is(Token::Type::Ident) || function_token_stream.has_next_token()) {
|
if (!maybe_keyword_token.is(Token::Type::Ident) || function_token_stream.has_next_token()) {
|
||||||
dbgln_if(CSS_PARSER_DEBUG, "Failed to parse :{}() parameter as a keyword: not an ident", pseudo_function.name());
|
dbgln_if(CSS_PARSER_DEBUG, "Failed to parse :{}() parameter as a keyword: not an ident", pseudo_function.name());
|
||||||
return ParseError::SyntaxError;
|
return ParseError::SyntaxError;
|
||||||
|
@ -586,8 +586,8 @@ Parser::ParseErrorOr<Selector::SimpleSelector> Parser::parse_pseudo_simple_selec
|
||||||
|
|
||||||
for (auto language_token_list : language_token_lists) {
|
for (auto language_token_list : language_token_lists) {
|
||||||
auto language_token_stream = TokenStream(language_token_list);
|
auto language_token_stream = TokenStream(language_token_list);
|
||||||
language_token_stream.skip_whitespace();
|
language_token_stream.discard_whitespace();
|
||||||
auto language_token = language_token_stream.next_token();
|
auto language_token = language_token_stream.consume_a_token();
|
||||||
if (!(language_token.is(Token::Type::Ident) || language_token.is(Token::Type::String))) {
|
if (!(language_token.is(Token::Type::Ident) || language_token.is(Token::Type::String))) {
|
||||||
dbgln_if(CSS_PARSER_DEBUG, "Invalid language range in :{}() - not a string/ident", pseudo_function.name());
|
dbgln_if(CSS_PARSER_DEBUG, "Invalid language range in :{}() - not a string/ident", pseudo_function.name());
|
||||||
return ParseError::SyntaxError;
|
return ParseError::SyntaxError;
|
||||||
|
@ -596,7 +596,7 @@ Parser::ParseErrorOr<Selector::SimpleSelector> Parser::parse_pseudo_simple_selec
|
||||||
auto language_string = language_token.is(Token::Type::String) ? language_token.token().string() : language_token.token().ident();
|
auto language_string = language_token.is(Token::Type::String) ? language_token.token().string() : language_token.token().ident();
|
||||||
languages.append(language_string);
|
languages.append(language_string);
|
||||||
|
|
||||||
language_token_stream.skip_whitespace();
|
language_token_stream.discard_whitespace();
|
||||||
if (language_token_stream.has_next_token()) {
|
if (language_token_stream.has_next_token()) {
|
||||||
dbgln_if(CSS_PARSER_DEBUG, "Invalid language range in :{}() - trailing tokens", pseudo_function.name());
|
dbgln_if(CSS_PARSER_DEBUG, "Invalid language range in :{}() - trailing tokens", pseudo_function.name());
|
||||||
return ParseError::SyntaxError;
|
return ParseError::SyntaxError;
|
||||||
|
@ -633,7 +633,7 @@ Parser::ParseErrorOr<Selector::SimpleSelector> Parser::parse_pseudo_simple_selec
|
||||||
Parser::ParseErrorOr<Optional<Selector::SimpleSelector>> Parser::parse_simple_selector(TokenStream<ComponentValue>& tokens)
|
Parser::ParseErrorOr<Optional<Selector::SimpleSelector>> Parser::parse_simple_selector(TokenStream<ComponentValue>& tokens)
|
||||||
{
|
{
|
||||||
auto peek_token_ends_selector = [&]() -> bool {
|
auto peek_token_ends_selector = [&]() -> bool {
|
||||||
auto const& value = tokens.peek_token();
|
auto const& value = tokens.next_token();
|
||||||
return (value.is(Token::Type::EndOfFile) || value.is(Token::Type::Whitespace) || value.is(Token::Type::Comma));
|
return (value.is(Token::Type::EndOfFile) || value.is(Token::Type::Whitespace) || value.is(Token::Type::Comma));
|
||||||
};
|
};
|
||||||
|
|
||||||
|
@ -654,7 +654,7 @@ Parser::ParseErrorOr<Optional<Selector::SimpleSelector>> Parser::parse_simple_se
|
||||||
};
|
};
|
||||||
}
|
}
|
||||||
|
|
||||||
auto const& first_value = tokens.next_token();
|
auto const& first_value = tokens.consume_a_token();
|
||||||
|
|
||||||
if (first_value.is(Token::Type::Delim)) {
|
if (first_value.is(Token::Type::Delim)) {
|
||||||
u32 delim = first_value.token().delim();
|
u32 delim = first_value.token().delim();
|
||||||
|
@ -666,7 +666,7 @@ Parser::ParseErrorOr<Optional<Selector::SimpleSelector>> Parser::parse_simple_se
|
||||||
if (peek_token_ends_selector())
|
if (peek_token_ends_selector())
|
||||||
return ParseError::SyntaxError;
|
return ParseError::SyntaxError;
|
||||||
|
|
||||||
auto const& class_name_value = tokens.next_token();
|
auto const& class_name_value = tokens.consume_a_token();
|
||||||
if (!class_name_value.is(Token::Type::Ident)) {
|
if (!class_name_value.is(Token::Type::Ident)) {
|
||||||
dbgln_if(CSS_PARSER_DEBUG, "Expected an ident after '.', got: {}", class_name_value.to_debug_string());
|
dbgln_if(CSS_PARSER_DEBUG, "Expected an ident after '.', got: {}", class_name_value.to_debug_string());
|
||||||
return ParseError::SyntaxError;
|
return ParseError::SyntaxError;
|
||||||
|
@ -796,8 +796,8 @@ Optional<Selector::SimpleSelector::ANPlusBPattern> Parser::parse_a_n_plus_b_patt
|
||||||
// https://www.w3.org/TR/css-syntax-3/#the-anb-type
|
// https://www.w3.org/TR/css-syntax-3/#the-anb-type
|
||||||
// Unfortunately these can't be in the same order as in the spec.
|
// Unfortunately these can't be in the same order as in the spec.
|
||||||
|
|
||||||
values.skip_whitespace();
|
values.discard_whitespace();
|
||||||
auto const& first_value = values.next_token();
|
auto const& first_value = values.consume_a_token();
|
||||||
|
|
||||||
// odd | even
|
// odd | even
|
||||||
if (first_value.is(Token::Type::Ident)) {
|
if (first_value.is(Token::Type::Ident)) {
|
||||||
|
@ -822,11 +822,11 @@ Optional<Selector::SimpleSelector::ANPlusBPattern> Parser::parse_a_n_plus_b_patt
|
||||||
// <n-dimension> ['+' | '-'] <signless-integer>
|
// <n-dimension> ['+' | '-'] <signless-integer>
|
||||||
if (is_n_dimension(first_value)) {
|
if (is_n_dimension(first_value)) {
|
||||||
int a = first_value.token().dimension_value_int();
|
int a = first_value.token().dimension_value_int();
|
||||||
values.skip_whitespace();
|
values.discard_whitespace();
|
||||||
|
|
||||||
// <n-dimension> <signed-integer>
|
// <n-dimension> <signed-integer>
|
||||||
if (is_signed_integer(values.peek_token())) {
|
if (is_signed_integer(values.next_token())) {
|
||||||
int b = values.next_token().token().to_integer();
|
int b = values.consume_a_token().token().to_integer();
|
||||||
transaction.commit();
|
transaction.commit();
|
||||||
return Selector::SimpleSelector::ANPlusBPattern { a, b };
|
return Selector::SimpleSelector::ANPlusBPattern { a, b };
|
||||||
}
|
}
|
||||||
|
@ -834,9 +834,9 @@ Optional<Selector::SimpleSelector::ANPlusBPattern> Parser::parse_a_n_plus_b_patt
|
||||||
// <n-dimension> ['+' | '-'] <signless-integer>
|
// <n-dimension> ['+' | '-'] <signless-integer>
|
||||||
{
|
{
|
||||||
auto child_transaction = transaction.create_child();
|
auto child_transaction = transaction.create_child();
|
||||||
auto const& second_value = values.next_token();
|
auto const& second_value = values.consume_a_token();
|
||||||
values.skip_whitespace();
|
values.discard_whitespace();
|
||||||
auto const& third_value = values.next_token();
|
auto const& third_value = values.consume_a_token();
|
||||||
|
|
||||||
if (is_sign(second_value) && is_signless_integer(third_value)) {
|
if (is_sign(second_value) && is_signless_integer(third_value)) {
|
||||||
int b = third_value.token().to_integer() * (second_value.is_delim('+') ? 1 : -1);
|
int b = third_value.token().to_integer() * (second_value.is_delim('+') ? 1 : -1);
|
||||||
|
@ -851,8 +851,8 @@ Optional<Selector::SimpleSelector::ANPlusBPattern> Parser::parse_a_n_plus_b_patt
|
||||||
}
|
}
|
||||||
// <ndash-dimension> <signless-integer>
|
// <ndash-dimension> <signless-integer>
|
||||||
if (is_ndash_dimension(first_value)) {
|
if (is_ndash_dimension(first_value)) {
|
||||||
values.skip_whitespace();
|
values.discard_whitespace();
|
||||||
auto const& second_value = values.next_token();
|
auto const& second_value = values.consume_a_token();
|
||||||
if (is_signless_integer(second_value)) {
|
if (is_signless_integer(second_value)) {
|
||||||
int a = first_value.token().dimension_value_int();
|
int a = first_value.token().dimension_value_int();
|
||||||
int b = -second_value.token().to_integer();
|
int b = -second_value.token().to_integer();
|
||||||
|
@ -888,11 +888,11 @@ Optional<Selector::SimpleSelector::ANPlusBPattern> Parser::parse_a_n_plus_b_patt
|
||||||
// -n <signed-integer>
|
// -n <signed-integer>
|
||||||
// -n ['+' | '-'] <signless-integer>
|
// -n ['+' | '-'] <signless-integer>
|
||||||
if (first_value.is_ident("-n"sv)) {
|
if (first_value.is_ident("-n"sv)) {
|
||||||
values.skip_whitespace();
|
values.discard_whitespace();
|
||||||
|
|
||||||
// -n <signed-integer>
|
// -n <signed-integer>
|
||||||
if (is_signed_integer(values.peek_token())) {
|
if (is_signed_integer(values.next_token())) {
|
||||||
int b = values.next_token().token().to_integer();
|
int b = values.consume_a_token().token().to_integer();
|
||||||
transaction.commit();
|
transaction.commit();
|
||||||
return Selector::SimpleSelector::ANPlusBPattern { -1, b };
|
return Selector::SimpleSelector::ANPlusBPattern { -1, b };
|
||||||
}
|
}
|
||||||
|
@ -900,9 +900,9 @@ Optional<Selector::SimpleSelector::ANPlusBPattern> Parser::parse_a_n_plus_b_patt
|
||||||
// -n ['+' | '-'] <signless-integer>
|
// -n ['+' | '-'] <signless-integer>
|
||||||
{
|
{
|
||||||
auto child_transaction = transaction.create_child();
|
auto child_transaction = transaction.create_child();
|
||||||
auto const& second_value = values.next_token();
|
auto const& second_value = values.consume_a_token();
|
||||||
values.skip_whitespace();
|
values.discard_whitespace();
|
||||||
auto const& third_value = values.next_token();
|
auto const& third_value = values.consume_a_token();
|
||||||
|
|
||||||
if (is_sign(second_value) && is_signless_integer(third_value)) {
|
if (is_sign(second_value) && is_signless_integer(third_value)) {
|
||||||
int b = third_value.token().to_integer() * (second_value.is_delim('+') ? 1 : -1);
|
int b = third_value.token().to_integer() * (second_value.is_delim('+') ? 1 : -1);
|
||||||
|
@ -917,8 +917,8 @@ Optional<Selector::SimpleSelector::ANPlusBPattern> Parser::parse_a_n_plus_b_patt
|
||||||
}
|
}
|
||||||
// -n- <signless-integer>
|
// -n- <signless-integer>
|
||||||
if (first_value.is_ident("-n-"sv)) {
|
if (first_value.is_ident("-n-"sv)) {
|
||||||
values.skip_whitespace();
|
values.discard_whitespace();
|
||||||
auto const& second_value = values.next_token();
|
auto const& second_value = values.consume_a_token();
|
||||||
if (is_signless_integer(second_value)) {
|
if (is_signless_integer(second_value)) {
|
||||||
int b = -second_value.token().to_integer();
|
int b = -second_value.token().to_integer();
|
||||||
transaction.commit();
|
transaction.commit();
|
||||||
|
@ -941,16 +941,16 @@ Optional<Selector::SimpleSelector::ANPlusBPattern> Parser::parse_a_n_plus_b_patt
|
||||||
// We do *not* skip whitespace here.
|
// We do *not* skip whitespace here.
|
||||||
}
|
}
|
||||||
|
|
||||||
auto const& first_after_plus = values.next_token();
|
auto const& first_after_plus = values.consume_a_token();
|
||||||
// '+'?† n
|
// '+'?† n
|
||||||
// '+'?† n <signed-integer>
|
// '+'?† n <signed-integer>
|
||||||
// '+'?† n ['+' | '-'] <signless-integer>
|
// '+'?† n ['+' | '-'] <signless-integer>
|
||||||
if (first_after_plus.is_ident("n"sv)) {
|
if (first_after_plus.is_ident("n"sv)) {
|
||||||
values.skip_whitespace();
|
values.discard_whitespace();
|
||||||
|
|
||||||
// '+'?† n <signed-integer>
|
// '+'?† n <signed-integer>
|
||||||
if (is_signed_integer(values.peek_token())) {
|
if (is_signed_integer(values.next_token())) {
|
||||||
int b = values.next_token().token().to_integer();
|
int b = values.consume_a_token().token().to_integer();
|
||||||
transaction.commit();
|
transaction.commit();
|
||||||
return Selector::SimpleSelector::ANPlusBPattern { 1, b };
|
return Selector::SimpleSelector::ANPlusBPattern { 1, b };
|
||||||
}
|
}
|
||||||
|
@ -958,9 +958,9 @@ Optional<Selector::SimpleSelector::ANPlusBPattern> Parser::parse_a_n_plus_b_patt
|
||||||
// '+'?† n ['+' | '-'] <signless-integer>
|
// '+'?† n ['+' | '-'] <signless-integer>
|
||||||
{
|
{
|
||||||
auto child_transaction = transaction.create_child();
|
auto child_transaction = transaction.create_child();
|
||||||
auto const& second_value = values.next_token();
|
auto const& second_value = values.consume_a_token();
|
||||||
values.skip_whitespace();
|
values.discard_whitespace();
|
||||||
auto const& third_value = values.next_token();
|
auto const& third_value = values.consume_a_token();
|
||||||
|
|
||||||
if (is_sign(second_value) && is_signless_integer(third_value)) {
|
if (is_sign(second_value) && is_signless_integer(third_value)) {
|
||||||
int b = third_value.token().to_integer() * (second_value.is_delim('+') ? 1 : -1);
|
int b = third_value.token().to_integer() * (second_value.is_delim('+') ? 1 : -1);
|
||||||
|
@ -976,8 +976,8 @@ Optional<Selector::SimpleSelector::ANPlusBPattern> Parser::parse_a_n_plus_b_patt
|
||||||
|
|
||||||
// '+'?† n- <signless-integer>
|
// '+'?† n- <signless-integer>
|
||||||
if (first_after_plus.is_ident("n-"sv)) {
|
if (first_after_plus.is_ident("n-"sv)) {
|
||||||
values.skip_whitespace();
|
values.discard_whitespace();
|
||||||
auto const& second_value = values.next_token();
|
auto const& second_value = values.consume_a_token();
|
||||||
if (is_signless_integer(second_value)) {
|
if (is_signless_integer(second_value)) {
|
||||||
int b = -second_value.token().to_integer();
|
int b = -second_value.token().to_integer();
|
||||||
transaction.commit();
|
transaction.commit();
|
||||||
|
|
|
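
The An+B hunks above change only the token-stream calls; the grammar handling itself is untouched. As a reminder of what those branches produce, a hedged sketch of the expected mappings, where parse_anb() is a hypothetical wrapper around parse_a_n_plus_b_pattern() and not code from this commit:

    // parse_anb("odd")  -> ANPlusBPattern { 2, 1 }
    // parse_anb("2n+1") -> ANPlusBPattern { 2, 1 }
    // parse_anb("n")    -> ANPlusBPattern { 1, 0 }
    // parse_anb("-n+3") -> ANPlusBPattern { -1, 3 }
    // parse_anb("5")    -> ANPlusBPattern { 0, 5 }

The remaining hunks below are the TokenStream class itself, where the spec-named operations used above (next_token, consume_a_token, discard_a_token, discard_whitespace, and the mark stack) are introduced.
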
@@ -1,6 +1,6 @@
 /*
  * Copyright (c) 2020-2021, the SerenityOS developers.
- * Copyright (c) 2021-2023, Sam Atkins <atkinssj@serenityos.org>
+ * Copyright (c) 2021-2024, Sam Atkins <sam@ladybird.org>
  *
  * SPDX-License-Identifier: BSD-2-Clause
  */
@@ -14,6 +14,7 @@
 
 namespace Web::CSS::Parser {
 
+// https://drafts.csswg.org/css-syntax/#css-token-stream
 template<typename T>
 class TokenStream {
 public:
@@ -21,14 +22,14 @@ public:
     public:
         explicit StateTransaction(TokenStream<T>& token_stream)
             : m_token_stream(token_stream)
-            , m_saved_iterator_offset(token_stream.m_iterator_offset)
+            , m_saved_index(token_stream.m_index)
         {
         }
 
         ~StateTransaction()
        {
             if (!m_commit)
-                m_token_stream.m_iterator_offset = m_saved_iterator_offset;
+                m_token_stream.m_index = m_saved_index;
         }
 
         StateTransaction create_child() { return StateTransaction(*this); }
@@ -44,13 +45,13 @@ public:
         explicit StateTransaction(StateTransaction& parent)
             : m_parent(&parent)
             , m_token_stream(parent.m_token_stream)
-            , m_saved_iterator_offset(parent.m_token_stream.m_iterator_offset)
+            , m_saved_index(parent.m_token_stream.m_index)
         {
         }
 
         StateTransaction* m_parent { nullptr };
         TokenStream<T>& m_token_stream;
-        int m_saved_iterator_offset { 0 };
+        size_t m_saved_index { 0 };
         bool m_commit { false };
     };
 
@@ -74,60 +75,114 @@ public:
     TokenStream(TokenStream<T> const&) = delete;
     TokenStream(TokenStream<T>&&) = default;
 
+    // https://drafts.csswg.org/css-syntax/#token-stream-next-token
+    [[nodiscard]] T const& next_token() const
+    {
+        // The item of tokens at index.
+        // If that index would be out-of-bounds past the end of the list, it’s instead an <eof-token>.
+        if (m_index < m_tokens.size())
+            return m_tokens[m_index];
+        return m_eof;
+    }
+
+    // https://drafts.csswg.org/css-syntax/#token-stream-empty
+    [[nodiscard]] bool is_empty() const
+    {
+        // A token stream is empty if the next token is an <eof-token>.
+        return next_token().is(Token::Type::EndOfFile);
+    }
+
+    // https://drafts.csswg.org/css-syntax/#token-stream-consume-a-token
+    [[nodiscard]] T const& consume_a_token()
+    {
+        // Let token be the next token. Increment index, then return token.
+        auto& token = next_token();
+        ++m_index;
+        return token;
+    }
+
+    // https://drafts.csswg.org/css-syntax/#token-stream-discard-a-token
+    void discard_a_token()
+    {
+        // If the token stream is not empty, increment index.
+        if (!is_empty())
+            ++m_index;
+    }
+
+    // https://drafts.csswg.org/css-syntax/#token-stream-mark
+    void mark()
+    {
+        // Append index to marked indexes.
+        m_marked_indexes.append(m_index);
+    }
+
+    // https://drafts.csswg.org/css-syntax/#token-stream-restore-a-mark
+    void restore_a_mark()
+    {
+        // Pop from marked indexes, and set index to the popped value.
+        m_index = m_marked_indexes.take_last();
+    }
+
+    // https://drafts.csswg.org/css-syntax/#token-stream-discard-a-mark
+    void discard_a_mark()
+    {
+        // Pop from marked indexes, and do nothing with the popped value.
+        m_marked_indexes.take_last();
+    }
+
+    // https://drafts.csswg.org/css-syntax/#token-stream-discard-whitespace
+    void discard_whitespace()
+    {
+        // While the next token is a <whitespace-token>, discard a token.
+        while (next_token().is(Token::Type::Whitespace))
+            discard_a_token();
+    }
+
     bool has_next_token()
     {
-        return (size_t)(m_iterator_offset + 1) < m_tokens.size();
-    }
-
-    T const& next_token()
-    {
-        if (!has_next_token())
-            return m_eof;
-
-        ++m_iterator_offset;
-
-        return m_tokens.at(m_iterator_offset);
-    }
-
-    T const& peek_token(int offset = 0)
-    {
-        if (!has_next_token())
-            return m_eof;
-
-        return m_tokens.at(m_iterator_offset + offset + 1);
+        return !is_empty();
     }
 
+    // Deprecated, used in older versions of the spec.
     T const& current_token()
     {
-        if ((size_t)m_iterator_offset >= m_tokens.size())
+        if (m_index < 1 || (m_index - 1) >= m_tokens.size())
             return m_eof;
 
-        return m_tokens.at(m_iterator_offset);
+        return m_tokens.at(m_index - 1);
     }
 
+    // Deprecated
+    T const& peek_token(size_t offset = 0)
+    {
+        if (remaining_token_count() <= offset)
+            return m_eof;
+
+        return m_tokens.at(m_index + offset);
+    }
+
+    // Deprecated, was used in older versions of the spec.
     void reconsume_current_input_token()
    {
-        if (m_iterator_offset >= 0)
-            --m_iterator_offset;
+        if (m_index > 0)
+            --m_index;
     }
 
     StateTransaction begin_transaction() { return StateTransaction(*this); }
 
-    void skip_whitespace()
+    size_t remaining_token_count() const
     {
-        while (peek_token().is(Token::Type::Whitespace))
-            next_token();
+        if (m_tokens.size() > m_index)
+            return m_tokens.size() - m_index;
+        return 0;
     }
 
-    size_t token_count() const { return m_tokens.size(); }
-    size_t remaining_token_count() const { return token_count() - m_iterator_offset - 1; }
-
     void dump_all_tokens()
     {
         dbgln("Dumping all tokens:");
         for (size_t i = 0; i < m_tokens.size(); ++i) {
             auto& token = m_tokens[i];
-            if ((i - 1) == (size_t)m_iterator_offset)
+            if (i == m_index)
                 dbgln("-> {}", token.to_debug_string());
             else
                 dbgln("  {}", token.to_debug_string());
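
None of the hunks in this diff call the new mark()/restore_a_mark()/discard_a_mark() operations yet. A minimal sketch of how that stack is meant to be used, assuming a ComponentValue stream; consume_ident_if_present() is a hypothetical helper, not code from this commit:

    // Hypothetical helper illustrating the spec-style mark/restore flow.
    static bool consume_ident_if_present(TokenStream<ComponentValue>& tokens)
    {
        tokens.mark();                // push the current index onto the mark stack
        tokens.discard_whitespace();
        if (tokens.next_token().is(Token::Type::Ident)) {
            tokens.discard_a_token(); // consume the ident
            tokens.discard_a_mark();  // success: pop and forget the saved index
            return true;
        }
        tokens.restore_a_mark();      // failure: pop and rewind to the saved index
        return false;
    }
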
@@ -136,12 +191,18 @@ public:
 
     void copy_state(Badge<Parser>, TokenStream<T> const& other)
     {
-        m_iterator_offset = other.m_iterator_offset;
+        m_index = other.m_index;
     }
 
 private:
+    // https://drafts.csswg.org/css-syntax/#token-stream-tokens
     Span<T const> m_tokens;
-    int m_iterator_offset { -1 };
+
+    // https://drafts.csswg.org/css-syntax/#token-stream-index
+    size_t m_index { 0 };
+
+    // https://drafts.csswg.org/css-syntax/#token-stream-marked-indexes
+    Vector<size_t> m_marked_indexes;
 
     T make_eof()
     {
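
The transaction mechanism itself is unchanged apart from StateTransaction now saving and restoring m_index rather than m_iterator_offset, so existing callers of begin_transaction()/commit() in the parser keep working. A hedged sketch of that pattern under the new index semantics; consume_number_if_present() is a hypothetical helper, not code from this commit:

    // Hypothetical helper illustrating transaction-based rewind over the new m_index.
    static bool consume_number_if_present(TokenStream<ComponentValue>& tokens)
    {
        auto transaction = tokens.begin_transaction();
        tokens.discard_whitespace();
        if (!tokens.next_token().is(Token::Type::Number))
            return false;               // the transaction's destructor restores the saved index
        (void)tokens.consume_a_token(); // consume the number token
        transaction.commit();           // keep the advanced index; no rewind on destruction
        return true;
    }
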