From b645e26e9b29437c0e248b5e43e3ec76aacf960d Mon Sep 17 00:00:00 2001 From: Sam Atkins Date: Wed, 9 Oct 2024 12:29:29 +0100 Subject: [PATCH] LibWeb/CSS: Bring TokenStream in line with spec When the TokenStream code was originally written, there was no such concept in the CSS Syntax spec. But since then, it's been officially added (https://drafts.csswg.org/css-syntax/#css-token-stream), and the parsing algorithms are described in terms of it. This patch brings our implementation in line with the spec. A few deprecated TokenStream methods are left around until their users are also updated to match the newer spec. There are a few differences: - They name things differently. The main confusing one is that we had `next_token()` which consumed a token and returned it, but the spec has a `next_token()` which peeks the next token. The spec names are honestly better than what I'd come up with. (`discard_a_token()` is a nice addition too!) - We used to store the index of the token that was just consumed, and they instead store the index of the token that will be consumed next. This is a perfect breeding ground for off-by-one errors, so I've finally added a test suite for TokenStream itself. - We use a transaction system for rewinding, and the spec uses a stack of "marks", which can be manually rewound to. These should be able to coexist as long as we stick with marks in the parser spec algorithms, and stick with transactions elsewhere. 
--- .../LibWeb/GenerateCSSMathFunctions.cpp | 6 +- Tests/LibWeb/CMakeLists.txt | 1 + Tests/LibWeb/TestCSSTokenStream.cpp | 98 +++ .../LibWeb/CSS/Parser/GradientParsing.cpp | 96 +-- .../LibWeb/CSS/Parser/MediaParsing.cpp | 122 +-- .../Libraries/LibWeb/CSS/Parser/Parser.cpp | 778 +++++++++--------- .../LibWeb/CSS/Parser/SelectorParsing.cpp | 130 +-- .../Libraries/LibWeb/CSS/Parser/TokenStream.h | 135 ++- 8 files changed, 763 insertions(+), 603 deletions(-) create mode 100644 Tests/LibWeb/TestCSSTokenStream.cpp diff --git a/Meta/Lagom/Tools/CodeGenerators/LibWeb/GenerateCSSMathFunctions.cpp b/Meta/Lagom/Tools/CodeGenerators/LibWeb/GenerateCSSMathFunctions.cpp index 3ea9d420a73..d481a5692ef 100644 --- a/Meta/Lagom/Tools/CodeGenerators/LibWeb/GenerateCSSMathFunctions.cpp +++ b/Meta/Lagom/Tools/CodeGenerators/LibWeb/GenerateCSSMathFunctions.cpp @@ -97,15 +97,15 @@ namespace Web::CSS::Parser { static Optional parse_rounding_strategy(Vector const& tokens) { auto stream = TokenStream { tokens }; - stream.skip_whitespace(); + stream.discard_whitespace(); if (!stream.has_next_token()) return {}; - auto& ident = stream.next_token(); + auto& ident = stream.consume_a_token(); if (!ident.is(Token::Type::Ident)) return {}; - stream.skip_whitespace(); + stream.discard_whitespace(); if (stream.has_next_token()) return {}; diff --git a/Tests/LibWeb/CMakeLists.txt b/Tests/LibWeb/CMakeLists.txt index 59b5b337768..a3020b66afa 100644 --- a/Tests/LibWeb/CMakeLists.txt +++ b/Tests/LibWeb/CMakeLists.txt @@ -1,6 +1,7 @@ set(TEST_SOURCES TestCSSIDSpeed.cpp TestCSSPixels.cpp + TestCSSTokenStream.cpp TestFetchInfrastructure.cpp TestFetchURL.cpp TestHTMLTokenizer.cpp diff --git a/Tests/LibWeb/TestCSSTokenStream.cpp b/Tests/LibWeb/TestCSSTokenStream.cpp new file mode 100644 index 00000000000..d61f8642ec7 --- /dev/null +++ b/Tests/LibWeb/TestCSSTokenStream.cpp @@ -0,0 +1,98 @@ +/* + * Copyright (c) 2024, Sam Atkins + * + * SPDX-License-Identifier: BSD-2-Clause + */ + +#include +#include +#include 
+#include + +namespace Web::CSS::Parser { + +TEST_CASE(basic) +{ + Vector tokens { + Token::create_ident("hello"_fly_string), + }; + + TokenStream stream { tokens }; + EXPECT(!stream.is_empty()); + EXPECT(stream.has_next_token()); + EXPECT_EQ(stream.remaining_token_count(), 1u); + + // next_token() doesn't consume it + auto const& next = stream.next_token(); + EXPECT(!stream.is_empty()); + EXPECT(stream.has_next_token()); + EXPECT_EQ(stream.remaining_token_count(), 1u); + // Check what the token is + EXPECT(next.is(Token::Type::Ident)); + EXPECT_EQ(next.ident(), "hello"_fly_string); + + // consume_a_token() does consume it + auto const& consumed = stream.consume_a_token(); + EXPECT(stream.is_empty()); + EXPECT(!stream.has_next_token()); + EXPECT_EQ(stream.remaining_token_count(), 0u); + // Check what the token is + EXPECT(consumed.is(Token::Type::Ident)); + EXPECT_EQ(consumed.ident(), "hello"_fly_string); + + // Now, any further tokens should be EOF + EXPECT(stream.next_token().is(Token::Type::EndOfFile)); + EXPECT(stream.consume_a_token().is(Token::Type::EndOfFile)); +} + +TEST_CASE(marks) +{ + Vector tokens { + Token::create_ident("a"_fly_string), + Token::create_ident("b"_fly_string), + Token::create_ident("c"_fly_string), + Token::create_ident("d"_fly_string), + Token::create_ident("e"_fly_string), + Token::create_ident("f"_fly_string), + Token::create_ident("g"_fly_string), + }; + TokenStream stream { tokens }; + + stream.mark(); // 0 + + EXPECT_EQ(stream.remaining_token_count(), 7u); + + stream.discard_a_token(); + stream.discard_a_token(); + stream.discard_a_token(); + + EXPECT_EQ(stream.remaining_token_count(), 4u); + + stream.mark(); // 3 + + stream.discard_a_token(); + + EXPECT_EQ(stream.remaining_token_count(), 3u); + + stream.restore_a_mark(); // Back to 3 + + EXPECT_EQ(stream.remaining_token_count(), 4u); + + stream.discard_a_token(); + stream.discard_a_token(); + stream.discard_a_token(); + + EXPECT_EQ(stream.remaining_token_count(), 1u); + + 
stream.mark(); // 6 + + stream.discard_a_mark(); + + EXPECT_EQ(stream.remaining_token_count(), 1u); + + stream.restore_a_mark(); // Back to 0 + + EXPECT_EQ(stream.remaining_token_count(), 7u); +} + +} diff --git a/Userland/Libraries/LibWeb/CSS/Parser/GradientParsing.cpp b/Userland/Libraries/LibWeb/CSS/Parser/GradientParsing.cpp index c1ee6ac8a9f..978a5f3d33d 100644 --- a/Userland/Libraries/LibWeb/CSS/Parser/GradientParsing.cpp +++ b/Userland/Libraries/LibWeb/CSS/Parser/GradientParsing.cpp @@ -27,20 +27,20 @@ Optional> Parser::parse_color_stop_list(TokenStream ElementType { - tokens.skip_whitespace(); + tokens.discard_whitespace(); if (!tokens.has_next_token()) return ElementType::Garbage; RefPtr color; Optional position; Optional second_position; - if (auto dimension = parse_dimension(tokens.peek_token()); dimension.has_value() && is_position(*dimension)) { + if (auto dimension = parse_dimension(tokens.next_token()); dimension.has_value() && is_position(*dimension)) { // [ ] or [] position = get_position(*dimension); - (void)tokens.next_token(); // dimension - tokens.skip_whitespace(); + tokens.discard_a_token(); // dimension + tokens.discard_whitespace(); // - if (!tokens.has_next_token() || tokens.peek_token().is(Token::Type::Comma)) { + if (!tokens.has_next_token() || tokens.next_token().is(Token::Type::Comma)) { element.transition_hint = typename TElement::ColorHint { *position }; return ElementType::ColorHint; } @@ -55,16 +55,16 @@ Optional> Parser::parse_color_stop_list(TokenStream ] (double-position color stops) // Note: Double-position color stops only appear to be valid in this order. 
for (auto stop_position : Array { &position, &second_position }) { - if (tokens.has_next_token() && !tokens.peek_token().is(Token::Type::Comma)) { - auto dimension = parse_dimension(tokens.next_token()); + if (tokens.has_next_token() && !tokens.next_token().is(Token::Type::Comma)) { + auto dimension = parse_dimension(tokens.consume_a_token()); if (!dimension.has_value() || !is_position(*dimension)) return ElementType::Garbage; *stop_position = get_position(*dimension); - tokens.skip_whitespace(); + tokens.discard_whitespace(); } } } @@ -83,14 +83,14 @@ Optional> Parser::parse_color_stop_list(TokenStream color_stops { first_element }; while (tokens.has_next_token()) { TElement list_element {}; - tokens.skip_whitespace(); - if (!tokens.next_token().is(Token::Type::Comma)) + tokens.discard_whitespace(); + if (!tokens.consume_a_token().is(Token::Type::Comma)) return {}; auto element_type = parse_color_stop_list_element(list_element); if (element_type == ElementType::ColorHint) { // , - tokens.skip_whitespace(); - if (!tokens.next_token().is(Token::Type::Comma)) + tokens.discard_whitespace(); + if (!tokens.consume_a_token().is(Token::Type::Comma)) return {}; // Note: This fills in the color stop on the same list_element as the color hint (it does not overwrite it). 
if (parse_color_stop_list_element(list_element) != ElementType::ColorStop) @@ -140,7 +140,7 @@ RefPtr Parser::parse_linear_gradient_function(TokenStream Parser::parse_linear_gradient_function(TokenStream | to ]?, ) TokenStream tokens { component_value.function().values() }; - tokens.skip_whitespace(); + tokens.discard_whitespace(); if (!tokens.has_next_token()) return nullptr; @@ -194,10 +194,10 @@ RefPtr Parser::parse_linear_gradient_function(TokenStream - tokens.next_token(); + tokens.discard_a_token(); auto angle_value = first_param.token().dimension_value(); auto unit_string = first_param.token().dimension_unit(); auto angle_type = Angle::unit_from_name(unit_string); @@ -211,23 +211,23 @@ RefPtr Parser::parse_linear_gradient_function(TokenStream side_b; - if (tokens.has_next_token() && tokens.peek_token().is(Token::Type::Ident)) - side_b = to_side(tokens.next_token().token().ident()); + if (tokens.has_next_token() && tokens.next_token().is(Token::Type::Ident)) + side_b = to_side(tokens.consume_a_token().token().ident()); if (side_a.has_value() && !side_b.has_value()) { gradient_direction = *side_a; @@ -252,11 +252,11 @@ RefPtr Parser::parse_linear_gradient_function(TokenStream Parser::parse_linear_gradient_function(TokenStream Parser::parse_conic_gradient_function(TokenStream& outer_tokens) { auto transaction = outer_tokens.begin_transaction(); - auto& component_value = outer_tokens.next_token(); + auto& component_value = outer_tokens.consume_a_token(); if (!component_value.is_function()) return nullptr; @@ -287,7 +287,7 @@ RefPtr Parser::parse_conic_gradient_function(TokenStream Parser::parse_conic_gradient_function(TokenStream ]? [ at ]? 
] || // , ) - auto token = tokens.peek_token(); + auto token = tokens.next_token(); bool got_from_angle = false; bool got_color_interpolation_method = false; bool got_at_position = false; @@ -305,8 +305,8 @@ RefPtr Parser::parse_conic_gradient_function(TokenStream Parser::parse_conic_gradient_function(TokenStream Parser::parse_conic_gradient_function(TokenStream Parser::parse_radial_gradient_function(TokenStream Parser::parse_radial_gradient_function(TokenStream Parser::parse_radial_gradient_function(TokenStream Optional { auto transaction = tokens.begin_transaction(); - tokens.skip_whitespace(); - auto& token = tokens.next_token(); + tokens.discard_whitespace(); + auto& token = tokens.consume_a_token(); if (!token.is(Token::Type::Ident)) return {}; auto ident = token.token().ident(); @@ -446,11 +446,11 @@ RefPtr Parser::parse_radial_gradient_function(TokenStream | // {2} auto transaction_size = tokens.begin_transaction(); - tokens.skip_whitespace(); + tokens.discard_whitespace(); if (!tokens.has_next_token()) return {}; - if (tokens.peek_token().is(Token::Type::Ident)) { - auto extent = parse_extent_keyword(tokens.next_token().token().ident()); + if (tokens.next_token().is(Token::Type::Ident)) { + auto extent = parse_extent_keyword(tokens.consume_a_token().token().ident()); if (!extent.has_value()) return {}; return commit_value(*extent, transaction_size); @@ -459,7 +459,7 @@ RefPtr Parser::parse_radial_gradient_function(TokenStream Parser::parse_radial_gradient_function(TokenStream Parser::parse_radial_gradient_function(TokenStream diff --git a/Userland/Libraries/LibWeb/CSS/Parser/MediaParsing.cpp b/Userland/Libraries/LibWeb/CSS/Parser/MediaParsing.cpp index 32349129037..71d1978a7c6 100644 --- a/Userland/Libraries/LibWeb/CSS/Parser/MediaParsing.cpp +++ b/Userland/Libraries/LibWeb/CSS/Parser/MediaParsing.cpp @@ -29,7 +29,7 @@ Vector> Parser::parse_a_media_query_list(TokenStream Parser::parse_media_query(TokenStream& // `[ not | only ]?`, Returns whether to negate 
the query auto parse_initial_modifier = [](auto& tokens) -> Optional { auto transaction = tokens.begin_transaction(); - tokens.skip_whitespace(); - auto& token = tokens.next_token(); + tokens.discard_whitespace(); + auto& token = tokens.consume_a_token(); if (!token.is(Token::Type::Ident)) return {}; @@ -92,11 +92,11 @@ NonnullRefPtr Parser::parse_media_query(TokenStream& }; auto media_query = MediaQuery::create(); - tokens.skip_whitespace(); + tokens.discard_whitespace(); // `` if (auto media_condition = parse_media_condition(tokens, MediaCondition::AllowOr::Yes)) { - tokens.skip_whitespace(); + tokens.discard_whitespace(); if (tokens.has_next_token()) return invalid_media_query(); media_query->m_media_condition = move(media_condition); @@ -106,13 +106,13 @@ NonnullRefPtr Parser::parse_media_query(TokenStream& // `[ not | only ]?` if (auto modifier = parse_initial_modifier(tokens); modifier.has_value()) { media_query->m_negated = modifier.value(); - tokens.skip_whitespace(); + tokens.discard_whitespace(); } // `` if (auto media_type = parse_media_type(tokens); media_type.has_value()) { media_query->m_media_type = media_type.value(); - tokens.skip_whitespace(); + tokens.discard_whitespace(); } else { return invalid_media_query(); } @@ -121,9 +121,9 @@ NonnullRefPtr Parser::parse_media_query(TokenStream& return media_query; // `[ and ]?` - if (auto maybe_and = tokens.next_token(); maybe_and.is_ident("and"sv)) { + if (auto maybe_and = tokens.consume_a_token(); maybe_and.is_ident("and"sv)) { if (auto media_condition = parse_media_condition(tokens, MediaCondition::AllowOr::No)) { - tokens.skip_whitespace(); + tokens.discard_whitespace(); if (tokens.has_next_token()) return invalid_media_query(); media_query->m_media_condition = move(media_condition); @@ -142,14 +142,14 @@ OwnPtr Parser::parse_media_condition(TokenStream { // ` | [ * | * ]` auto transaction = tokens.begin_transaction(); - tokens.skip_whitespace(); + tokens.discard_whitespace(); // ` = not ` auto 
parse_media_not = [&](auto& tokens) -> OwnPtr { auto local_transaction = tokens.begin_transaction(); - tokens.skip_whitespace(); + tokens.discard_whitespace(); - auto& first_token = tokens.next_token(); + auto& first_token = tokens.consume_a_token(); if (first_token.is_ident("not"sv)) { if (auto child_condition = parse_media_condition(tokens, MediaCondition::AllowOr::Yes)) { local_transaction.commit(); @@ -162,11 +162,11 @@ OwnPtr Parser::parse_media_condition(TokenStream auto parse_media_with_combinator = [&](auto& tokens, StringView combinator) -> OwnPtr { auto local_transaction = tokens.begin_transaction(); - tokens.skip_whitespace(); + tokens.discard_whitespace(); - auto& first = tokens.next_token(); + auto& first = tokens.consume_a_token(); if (first.is_ident(combinator)) { - tokens.skip_whitespace(); + tokens.discard_whitespace(); if (auto media_in_parens = parse_media_in_parens(tokens)) { local_transaction.commit(); return media_in_parens; @@ -189,7 +189,7 @@ OwnPtr Parser::parse_media_condition(TokenStream // ` [ * | * ]` if (auto maybe_media_in_parens = parse_media_in_parens(tokens)) { - tokens.skip_whitespace(); + tokens.discard_whitespace(); // Only `` if (!tokens.has_next_token()) { transaction.commit(); @@ -203,11 +203,11 @@ OwnPtr Parser::parse_media_condition(TokenStream if (auto media_and = parse_media_and(tokens)) { child_conditions.append(media_and.release_nonnull()); - tokens.skip_whitespace(); + tokens.discard_whitespace(); while (tokens.has_next_token()) { if (auto next_media_and = parse_media_and(tokens)) { child_conditions.append(next_media_and.release_nonnull()); - tokens.skip_whitespace(); + tokens.discard_whitespace(); continue; } // We failed - invalid syntax! 
@@ -223,11 +223,11 @@ OwnPtr Parser::parse_media_condition(TokenStream if (auto media_or = parse_media_or(tokens)) { child_conditions.append(media_or.release_nonnull()); - tokens.skip_whitespace(); + tokens.discard_whitespace(); while (tokens.has_next_token()) { if (auto next_media_or = parse_media_or(tokens)) { child_conditions.append(next_media_or.release_nonnull()); - tokens.skip_whitespace(); + tokens.discard_whitespace(); continue; } // We failed - invalid syntax! @@ -247,7 +247,7 @@ OwnPtr Parser::parse_media_condition(TokenStream Optional Parser::parse_media_feature(TokenStream& tokens) { // `[ | | ]` - tokens.skip_whitespace(); + tokens.discard_whitespace(); // ` = ` struct MediaFeatureName { @@ -260,7 +260,7 @@ Optional Parser::parse_media_feature(TokenStream& }; auto parse_mf_name = [](auto& tokens, bool allow_min_max_prefix) -> Optional { auto transaction = tokens.begin_transaction(); - auto& token = tokens.next_token(); + auto& token = tokens.consume_a_token(); if (token.is(Token::Type::Ident)) { auto name = token.token().ident(); if (auto id = media_feature_id_from_string(name); id.has_value()) { @@ -285,10 +285,10 @@ Optional Parser::parse_media_feature(TokenStream& // ` = ` auto parse_mf_boolean = [&](auto& tokens) -> Optional { auto transaction = tokens.begin_transaction(); - tokens.skip_whitespace(); + tokens.discard_whitespace(); if (auto maybe_name = parse_mf_name(tokens, false); maybe_name.has_value()) { - tokens.skip_whitespace(); + tokens.discard_whitespace(); if (!tokens.has_next_token()) { transaction.commit(); return MediaFeature::boolean(maybe_name->id); @@ -301,14 +301,14 @@ Optional Parser::parse_media_feature(TokenStream& // ` = : ` auto parse_mf_plain = [&](auto& tokens) -> Optional { auto transaction = tokens.begin_transaction(); - tokens.skip_whitespace(); + tokens.discard_whitespace(); if (auto maybe_name = parse_mf_name(tokens, true); maybe_name.has_value()) { - tokens.skip_whitespace(); - if 
(tokens.next_token().is(Token::Type::Colon)) { - tokens.skip_whitespace(); + tokens.discard_whitespace(); + if (tokens.consume_a_token().is(Token::Type::Colon)) { + tokens.discard_whitespace(); if (auto maybe_value = parse_media_feature_value(maybe_name->id, tokens); maybe_value.has_value()) { - tokens.skip_whitespace(); + tokens.discard_whitespace(); if (!tokens.has_next_token()) { transaction.commit(); switch (maybe_name->type) { @@ -333,9 +333,9 @@ Optional Parser::parse_media_feature(TokenStream& // = | | ` auto parse_comparison = [](auto& tokens) -> Optional { auto transaction = tokens.begin_transaction(); - tokens.skip_whitespace(); + tokens.discard_whitespace(); - auto& first = tokens.next_token(); + auto& first = tokens.consume_a_token(); if (first.is(Token::Type::Delim)) { auto first_delim = first.token().delim(); if (first_delim == '=') { @@ -343,9 +343,9 @@ Optional Parser::parse_media_feature(TokenStream& return MediaFeature::Comparison::Equal; } if (first_delim == '<') { - auto& second = tokens.peek_token(); + auto& second = tokens.next_token(); if (second.is_delim('=')) { - tokens.next_token(); + tokens.discard_a_token(); transaction.commit(); return MediaFeature::Comparison::LessThanOrEqual; } @@ -353,9 +353,9 @@ Optional Parser::parse_media_feature(TokenStream& return MediaFeature::Comparison::LessThan; } if (first_delim == '>') { - auto& second = tokens.peek_token(); + auto& second = tokens.next_token(); if (second.is_delim('=')) { - tokens.next_token(); + tokens.discard_a_token(); transaction.commit(); return MediaFeature::Comparison::GreaterThanOrEqual; } @@ -403,16 +403,16 @@ Optional Parser::parse_media_feature(TokenStream& // | ` auto parse_mf_range = [&](auto& tokens) -> Optional { auto transaction = tokens.begin_transaction(); - tokens.skip_whitespace(); + tokens.discard_whitespace(); // ` ` // NOTE: We have to check for first, since all s will also parse as . 
if (auto maybe_name = parse_mf_name(tokens, false); maybe_name.has_value() && media_feature_type_is_range(maybe_name->id)) { - tokens.skip_whitespace(); + tokens.discard_whitespace(); if (auto maybe_comparison = parse_comparison(tokens); maybe_comparison.has_value()) { - tokens.skip_whitespace(); + tokens.discard_whitespace(); if (auto maybe_value = parse_media_feature_value(maybe_name->id, tokens); maybe_value.has_value()) { - tokens.skip_whitespace(); + tokens.discard_whitespace(); if (!tokens.has_next_token() && !maybe_value->is_ident()) { transaction.commit(); return MediaFeature::half_range(maybe_value.release_value(), flip(maybe_comparison.release_value()), maybe_name->id); @@ -435,23 +435,23 @@ Optional Parser::parse_media_feature(TokenStream& while (tokens.has_next_token() && !maybe_name.has_value()) { if (auto maybe_comparison = parse_comparison(tokens); maybe_comparison.has_value()) { // We found a comparison, so the next non-whitespace token should be the - tokens.skip_whitespace(); + tokens.discard_whitespace(); maybe_name = parse_mf_name(tokens, false); break; } - tokens.next_token(); - tokens.skip_whitespace(); + tokens.discard_a_token(); + tokens.discard_whitespace(); } } // Now, we can parse the range properly. if (maybe_name.has_value() && media_feature_type_is_range(maybe_name->id)) { if (auto maybe_left_value = parse_media_feature_value(maybe_name->id, tokens); maybe_left_value.has_value()) { - tokens.skip_whitespace(); + tokens.discard_whitespace(); if (auto maybe_left_comparison = parse_comparison(tokens); maybe_left_comparison.has_value()) { - tokens.skip_whitespace(); - tokens.next_token(); // The which we already parsed above. - tokens.skip_whitespace(); + tokens.discard_whitespace(); + tokens.discard_a_token(); // The which we already parsed above. 
+ tokens.discard_whitespace(); if (!tokens.has_next_token()) { transaction.commit(); @@ -459,9 +459,9 @@ Optional Parser::parse_media_feature(TokenStream& } if (auto maybe_right_comparison = parse_comparison(tokens); maybe_right_comparison.has_value()) { - tokens.skip_whitespace(); + tokens.discard_whitespace(); if (auto maybe_right_value = parse_media_feature_value(maybe_name->id, tokens); maybe_right_value.has_value()) { - tokens.skip_whitespace(); + tokens.discard_whitespace(); // For this to be valid, the following must be true: // - Comparisons must either both be >/>= or both be Parser::parse_media_feature(TokenStream& Optional Parser::parse_media_type(TokenStream& tokens) { auto transaction = tokens.begin_transaction(); - tokens.skip_whitespace(); - auto const& token = tokens.next_token(); + tokens.discard_whitespace(); + auto const& token = tokens.consume_a_token(); if (!token.is(Token::Type::Ident)) return {}; @@ -517,19 +517,19 @@ OwnPtr Parser::parse_media_in_parens(TokenStream { // ` = ( ) | ( ) | ` auto transaction = tokens.begin_transaction(); - tokens.skip_whitespace(); + tokens.discard_whitespace(); // `( ) | ( )` - auto const& first_token = tokens.peek_token(); + auto const& first_token = tokens.next_token(); if (first_token.is_block() && first_token.block().is_paren()) { TokenStream inner_token_stream { first_token.block().values() }; if (auto maybe_media_condition = parse_media_condition(inner_token_stream, MediaCondition::AllowOr::Yes)) { - tokens.next_token(); + tokens.discard_a_token(); transaction.commit(); return maybe_media_condition.release_nonnull(); } if (auto maybe_media_feature = parse_media_feature(inner_token_stream); maybe_media_feature.has_value()) { - tokens.next_token(); + tokens.discard_a_token(); transaction.commit(); return MediaCondition::from_feature(maybe_media_feature.release_value()); } @@ -553,10 +553,10 @@ Optional Parser::parse_media_feature_value(MediaFeatureID med // NOTE: Calculations are not allowed for media 
feature values, at least in the current spec, so we reject them. // Identifiers - if (tokens.peek_token().is(Token::Type::Ident)) { + if (tokens.next_token().is(Token::Type::Ident)) { auto transaction = tokens.begin_transaction(); - tokens.skip_whitespace(); - auto keyword = keyword_from_string(tokens.next_token().token().ident()); + tokens.discard_whitespace(); + auto keyword = keyword_from_string(tokens.consume_a_token().token().ident()); if (keyword.has_value() && media_feature_accepts_keyword(media_feature, keyword.value())) { transaction.commit(); return MediaFeatureValue(keyword.value()); @@ -568,7 +568,7 @@ Optional Parser::parse_media_feature_value(MediaFeatureID med // Boolean ( in the spec: a 1 or 0) if (media_feature_accepts_type(media_feature, MediaFeatureValueType::Boolean)) { auto transaction = tokens.begin_transaction(); - tokens.skip_whitespace(); + tokens.discard_whitespace(); if (auto integer = parse_integer(tokens); integer.has_value() && !integer->is_calculated()) { auto integer_value = integer->value(); if (integer_value == 0 || integer_value == 1) { @@ -590,7 +590,7 @@ Optional Parser::parse_media_feature_value(MediaFeatureID med // Length if (media_feature_accepts_type(media_feature, MediaFeatureValueType::Length)) { auto transaction = tokens.begin_transaction(); - tokens.skip_whitespace(); + tokens.discard_whitespace(); if (auto length = parse_length(tokens); length.has_value() && !length->is_calculated()) { transaction.commit(); return MediaFeatureValue(length->value()); @@ -600,7 +600,7 @@ Optional Parser::parse_media_feature_value(MediaFeatureID med // Ratio if (media_feature_accepts_type(media_feature, MediaFeatureValueType::Ratio)) { auto transaction = tokens.begin_transaction(); - tokens.skip_whitespace(); + tokens.discard_whitespace(); if (auto ratio = parse_ratio(tokens); ratio.has_value()) { transaction.commit(); return MediaFeatureValue(ratio.release_value()); @@ -610,7 +610,7 @@ Optional 
Parser::parse_media_feature_value(MediaFeatureID med // Resolution if (media_feature_accepts_type(media_feature, MediaFeatureValueType::Resolution)) { auto transaction = tokens.begin_transaction(); - tokens.skip_whitespace(); + tokens.discard_whitespace(); if (auto resolution = parse_resolution(tokens); resolution.has_value() && !resolution->is_calculated()) { transaction.commit(); return MediaFeatureValue(resolution->value()); diff --git a/Userland/Libraries/LibWeb/CSS/Parser/Parser.cpp b/Userland/Libraries/LibWeb/CSS/Parser/Parser.cpp index d53df351411..0e6e744debf 100644 --- a/Userland/Libraries/LibWeb/CSS/Parser/Parser.cpp +++ b/Userland/Libraries/LibWeb/CSS/Parser/Parser.cpp @@ -177,7 +177,7 @@ RefPtr Parser::parse_a_supports(TokenStream& tokens) auto component_values = parse_a_list_of_component_values(tokens); TokenStream token_stream { component_values }; auto maybe_condition = parse_supports_condition(token_stream); - token_stream.skip_whitespace(); + token_stream.discard_whitespace(); if (maybe_condition && !token_stream.has_next_token()) return Supports::create(m_context.realm(), maybe_condition.release_nonnull()); @@ -187,13 +187,13 @@ RefPtr Parser::parse_a_supports(TokenStream& tokens) OwnPtr Parser::parse_supports_condition(TokenStream& tokens) { auto transaction = tokens.begin_transaction(); - tokens.skip_whitespace(); + tokens.discard_whitespace(); - auto const& peeked_token = tokens.peek_token(); + auto const& peeked_token = tokens.next_token(); // `not ` if (peeked_token.is_ident("not"sv)) { - tokens.next_token(); - tokens.skip_whitespace(); + tokens.discard_a_token(); + tokens.discard_whitespace(); auto child = parse_supports_in_parens(tokens); if (!child.has_value()) return {}; @@ -223,7 +223,7 @@ OwnPtr Parser::parse_supports_condition(TokenStream Parser::parse_supports_condition(TokenStream Parser::parse_supports_condition(TokenStream Parser::parse_supports_condition(TokenStream Parser::parse_supports_in_parens(TokenStream& tokens) { // `( )` 
- auto const& first_token = tokens.peek_token(); + auto const& first_token = tokens.next_token(); if (first_token.is_block() && first_token.block().is_paren()) { auto transaction = tokens.begin_transaction(); - tokens.next_token(); - tokens.skip_whitespace(); + tokens.discard_a_token(); + tokens.discard_whitespace(); TokenStream child_tokens { first_token.block().values() }; if (auto condition = parse_supports_condition(child_tokens)) { @@ -294,8 +294,8 @@ Optional Parser::parse_supports_in_parens(TokenStream Parser::parse_supports_feature(TokenStream& tokens) { auto transaction = tokens.begin_transaction(); - tokens.skip_whitespace(); - auto const& first_token = tokens.next_token(); + tokens.discard_whitespace(); + auto const& first_token = tokens.consume_a_token(); // `` if (first_token.is_block() && first_token.block().is_paren()) { @@ -328,8 +328,8 @@ Optional Parser::parse_supports_feature(TokenStream Parser::parse_general_enclosed(TokenStream& tokens) { auto transaction = tokens.begin_transaction(); - tokens.skip_whitespace(); - auto const& first_token = tokens.next_token(); + tokens.discard_whitespace(); + auto const& first_token = tokens.consume_a_token(); // `[ ? ) ]` if (first_token.is_function()) { @@ -358,7 +358,7 @@ Vector> Parser::consume_a_list_of_rules(TokenStream& toke // Repeatedly consume the next input token: for (;;) { - auto& token = tokens.next_token(); + auto& token = tokens.consume_a_token(); // if (token.is(Token::Type::Whitespace)) { @@ -422,7 +422,7 @@ NonnullRefPtr Parser::consume_an_at_rule(TokenStream& tokens) // To consume an at-rule: // Consume the next input token. - auto& name_ident = tokens.next_token(); + auto& name_ident = tokens.consume_a_token(); VERIFY(name_ident.is(Token::Type::AtKeyword)); // Create a new at-rule with its name set to the value of the current input token, its prelude initially set to an empty list, and its value initially set to nothing. 
@@ -433,7 +433,7 @@ NonnullRefPtr Parser::consume_an_at_rule(TokenStream& tokens) // Repeatedly consume the next input token: for (;;) { - auto& token = tokens.next_token(); + auto& token = tokens.consume_a_token(); // if (token.is(Token::Type::Semicolon)) { @@ -489,7 +489,7 @@ RefPtr Parser::consume_a_qualified_rule(TokenStream& tokens) // Repeatedly consume the next input token: for (;;) { - auto& token = tokens.next_token(); + auto& token = tokens.consume_a_token(); // if (token.is(Token::Type::EndOfFile)) { @@ -538,7 +538,7 @@ Vector Parser::consume_a_style_blocks_contents(TokenStream< // Repeatedly consume the next input token: for (;;) { - auto& token = tokens.next_token(); + auto& token = tokens.consume_a_token(); // // @@ -573,7 +573,7 @@ Vector Parser::consume_a_style_blocks_contents(TokenStream< // As long as the next input token is anything other than a or , // consume a component value and append it to the temporary list. for (;;) { - auto& next_input_token = tokens.peek_token(); + auto& next_input_token = tokens.next_token(); if (next_input_token.is(Token::Type::Semicolon) || next_input_token.is(Token::Type::EndOfFile)) break; temporary_list.append(consume_a_component_value(tokens)); @@ -610,7 +610,7 @@ Vector Parser::consume_a_style_blocks_contents(TokenStream< // As long as the next input token is anything other than a or , // consume a component value and throw away the returned value. for (;;) { - auto& peek = tokens.peek_token(); + auto& peek = tokens.next_token(); if (peek.is(Token::Type::Semicolon) || peek.is(Token::Type::EndOfFile)) break; (void)consume_a_component_value(tokens); @@ -624,7 +624,7 @@ ComponentValue Parser::consume_a_component_value(TokenStream& to { // Note: This overload is called once tokens have already been converted into component values, // so we do not need to do the work in the more general overload. - return tokens.next_token(); + return tokens.consume_a_token(); } // 5.4.7. 
Consume a component value @@ -635,7 +635,7 @@ ComponentValue Parser::consume_a_component_value(TokenStream& tokens) // To consume a component value: // Consume the next input token. - auto& token = tokens.next_token(); + auto& token = tokens.consume_a_token(); // If the current input token is a <{-token>, <[-token>, or <(-token>, consume a simple block and return it. if (token.is(Token::Type::OpenCurly) || token.is(Token::Type::OpenSquare) || token.is(Token::Type::OpenParen)) @@ -671,7 +671,7 @@ NonnullRefPtr Parser::consume_a_simple_block(TokenStream& tokens) // Repeatedly consume the next input token and process it as follows: for (;;) { - auto& token = tokens.next_token(); + auto& token = tokens.consume_a_token(); // ending token if (token.is(ending_token)) { @@ -715,7 +715,7 @@ NonnullRefPtr Parser::consume_a_function(TokenStream& tokens) // Repeatedly consume the next input token and process it as follows: for (;;) { - auto& token = tokens.next_token(); + auto& token = tokens.consume_a_token(); // <)-token> if (token.is(Token::Type::CloseParen)) { @@ -755,8 +755,8 @@ Optional Parser::consume_a_declaration(TokenStream& tokens) // Consume the next input token. auto transaction = tokens.begin_transaction(); - tokens.skip_whitespace(); - auto& token = tokens.next_token(); + tokens.discard_whitespace(); + auto& token = tokens.consume_a_token(); // NOTE: Not to spec, handle the case where the input token *isn't* an . if (!token.is(Token::Type::Ident)) @@ -770,25 +770,25 @@ Optional Parser::consume_a_declaration(TokenStream& tokens) Important declaration_important = Important::No; // 1. While the next input token is a , consume the next input token. - tokens.skip_whitespace(); + tokens.discard_whitespace(); // 2. If the next input token is anything other than a , this is a parse error. // Return nothing. 
- auto& maybe_colon = tokens.peek_token(); + auto& maybe_colon = tokens.next_token(); if (!maybe_colon.is(Token::Type::Colon)) { log_parse_error(); return {}; } // Otherwise, consume the next input token. - tokens.next_token(); + tokens.discard_a_token(); // 3. While the next input token is a , consume the next input token. - tokens.skip_whitespace(); + tokens.discard_whitespace(); // 4. As long as the next input token is anything other than an , consume a // component value and append it to the declaration’s value. for (;;) { - if (tokens.peek_token().is(Token::Type::EndOfFile)) { + if (tokens.next_token().is(Token::Type::EndOfFile)) { break; } declaration_values.append(consume_a_component_value(tokens)); @@ -860,7 +860,7 @@ Vector Parser::consume_a_list_of_declarations(TokenStream // @@ -894,7 +894,7 @@ Vector Parser::consume_a_list_of_declarations(TokenStream or , // consume a component value and append it to the temporary list. for (;;) { - auto& peek = tokens.peek_token(); + auto& peek = tokens.next_token(); if (peek.is(Token::Type::Semicolon) || peek.is(Token::Type::EndOfFile)) break; temporary_list.append(consume_a_component_value(tokens)); @@ -919,7 +919,7 @@ Vector Parser::consume_a_list_of_declarations(TokenStream or , // consume a component value and throw away the returned value. for (;;) { - auto& peek = tokens.peek_token(); + auto& peek = tokens.next_token(); if (peek.is(Token::Type::Semicolon) || peek.is(Token::Type::EndOfFile)) break; dbgln_if(CSS_PARSER_DEBUG, "Discarding token: '{}'", peek.to_debug_string()); @@ -949,10 +949,10 @@ RefPtr Parser::parse_a_rule(TokenStream& tokens) // Note: This is done when initializing the Parser. // 2. While the next input token from input is a , consume the next input token from input. - tokens.skip_whitespace(); + tokens.discard_whitespace(); // 3. If the next input token from input is an , return a syntax error. 
- auto& token = tokens.peek_token(); + auto& token = tokens.next_token(); if (token.is(Token::Type::EndOfFile)) { return {}; } @@ -970,10 +970,10 @@ RefPtr Parser::parse_a_rule(TokenStream& tokens) } // 4. While the next input token from input is a , consume the next input token from input. - tokens.skip_whitespace(); + tokens.discard_whitespace(); // 5. If the next input token from input is an , return rule. Otherwise, return a syntax error. - if (tokens.peek_token().is(Token::Type::EndOfFile)) + if (tokens.next_token().is(Token::Type::EndOfFile)) return rule; return {}; } @@ -1016,10 +1016,10 @@ Optional Parser::parse_a_declaration(TokenStream& tokens) // Note: This is done when initializing the Parser. // 2. While the next input token from input is a , consume the next input token. - tokens.skip_whitespace(); + tokens.discard_whitespace(); // 3. If the next input token from input is not an , return a syntax error. - auto& token = tokens.peek_token(); + auto& token = tokens.next_token(); if (!token.is(Token::Type::Ident)) { return {}; } @@ -1074,20 +1074,20 @@ Optional Parser::parse_a_component_value(TokenStream& tokens) // Note: This is done when initializing the Parser. // 2. While the next input token from input is a , consume the next input token from input. - tokens.skip_whitespace(); + tokens.discard_whitespace(); // 3. If the next input token from input is an , return a syntax error. - if (tokens.peek_token().is(Token::Type::EndOfFile)) + if (tokens.next_token().is(Token::Type::EndOfFile)) return {}; // 4. Consume a component value from input and let value be the return value. auto value = consume_a_component_value(tokens); // 5. While the next input token from input is a , consume the next input token. - tokens.skip_whitespace(); + tokens.discard_whitespace(); // 6. If the next input token from input is an , return value. Otherwise, return a syntax error. 
- if (tokens.peek_token().is(Token::Type::EndOfFile)) + if (tokens.next_token().is(Token::Type::EndOfFile)) return value; return {}; } @@ -1106,7 +1106,7 @@ Vector Parser::parse_a_list_of_component_values(TokenStream& Vector component_values; for (;;) { - if (tokens.peek_token().is(Token::Type::EndOfFile)) { + if (tokens.next_token().is(Token::Type::EndOfFile)) { break; } @@ -1166,7 +1166,7 @@ ElementInlineCSSStyleDeclaration* Parser::parse_as_style_attribute(DOM::Element& Optional Parser::parse_url_function(TokenStream& tokens) { auto transaction = tokens.begin_transaction(); - auto& component_value = tokens.next_token(); + auto& component_value = tokens.consume_a_token(); auto convert_string_to_url = [&](StringView url_string) -> Optional { auto url = m_context.complete_url(url_string); @@ -1210,7 +1210,7 @@ RefPtr Parser::parse_url_value(TokenStream& token RefPtr Parser::parse_basic_shape_value(TokenStream& tokens) { auto transaction = tokens.begin_transaction(); - auto& component_value = tokens.next_token(); + auto& component_value = tokens.consume_a_token(); if (!component_value.is_function()) return nullptr; @@ -1229,17 +1229,17 @@ RefPtr Parser::parse_basic_shape_value(TokenStream Parser::parse_layer_name(TokenStream& tokens }; auto transaction = tokens.begin_transaction(); - tokens.skip_whitespace(); + tokens.discard_whitespace(); if (!tokens.has_next_token() && allow_blank_layer_name == AllowBlankLayerName::Yes) { // No name present, just return a blank one return FlyString(); } - auto& first_name_token = tokens.next_token(); + auto& first_name_token = tokens.consume_a_token(); if (!is_valid_layer_name_part(first_name_token)) return {}; @@ -1276,11 +1276,11 @@ Optional Parser::parse_layer_name(TokenStream& tokens while (tokens.has_next_token()) { // Repeatedly parse `'.' ` - if (!tokens.peek_token().is_delim('.')) + if (!tokens.next_token().is_delim('.')) break; - (void)tokens.next_token(); // '.' + tokens.discard_a_token(); // '.' 
- auto& name_token = tokens.next_token(); + auto& name_token = tokens.consume_a_token(); if (!is_valid_layer_name_part(name_token)) return {}; builder.appendff(".{}", name_token.token().ident()); @@ -1381,18 +1381,18 @@ JS::GCPtr Parser::convert_to_import_rule(Rule& rule) } TokenStream tokens { rule.prelude() }; - tokens.skip_whitespace(); + tokens.discard_whitespace(); Optional url = parse_url_function(tokens); - if (!url.has_value() && tokens.peek_token().is(Token::Type::String)) - url = m_context.complete_url(tokens.next_token().token().string()); + if (!url.has_value() && tokens.next_token().is(Token::Type::String)) + url = m_context.complete_url(tokens.consume_a_token().token().string()); if (!url.has_value()) { - dbgln_if(CSS_PARSER_DEBUG, "Failed to parse @import rule: Unable to parse `{}` as URL.", tokens.peek_token().to_debug_string()); + dbgln_if(CSS_PARSER_DEBUG, "Failed to parse @import rule: Unable to parse `{}` as URL.", tokens.next_token().to_debug_string()); return {}; } - tokens.skip_whitespace(); + tokens.discard_whitespace(); // TODO: Support layers and import-conditions if (tokens.has_next_token()) { if constexpr (CSS_PARSER_DEBUG) { @@ -1424,7 +1424,7 @@ JS::GCPtr Parser::convert_to_layer_rule(Rule& rule) return {}; } - prelude_tokens.skip_whitespace(); + prelude_tokens.discard_whitespace(); if (prelude_tokens.has_next_token()) { dbgln_if(CSS_PARSER_DEBUG, "CSSParser: @layer has invalid prelude, (tokens after layer name) prelude = {}; discarding.", rule.prelude()); return {}; @@ -1445,16 +1445,16 @@ JS::GCPtr Parser::convert_to_layer_rule(Rule& rule) // CSSLayerStatementRule // @layer #; auto tokens = TokenStream { rule.prelude() }; - tokens.skip_whitespace(); + tokens.discard_whitespace(); Vector layer_names; while (tokens.has_next_token()) { // Comma if (!layer_names.is_empty()) { - if (auto comma = tokens.next_token(); !comma.is(Token::Type::Comma)) { + if (auto comma = tokens.consume_a_token(); !comma.is(Token::Type::Comma)) { 
dbgln_if(CSS_PARSER_DEBUG, "CSSParser: @layer missing separating comma, ({}) prelude = {}; discarding.", comma.to_debug_string(), rule.prelude()); return {}; } - tokens.skip_whitespace(); + tokens.discard_whitespace(); } if (auto name = parse_layer_name(tokens, AllowBlankLayerName::No); name.has_value()) { @@ -1463,7 +1463,7 @@ JS::GCPtr Parser::convert_to_layer_rule(Rule& rule) dbgln_if(CSS_PARSER_DEBUG, "CSSParser: @layer contains invalid name, prelude = {}; discarding.", rule.prelude()); return {}; } - tokens.skip_whitespace(); + tokens.discard_whitespace(); } if (layer_names.is_empty()) { @@ -1489,15 +1489,15 @@ JS::GCPtr Parser::convert_to_keyframes_rule(Rule& rule) } auto prelude_stream = TokenStream { rule.prelude() }; - prelude_stream.skip_whitespace(); - auto& token = prelude_stream.next_token(); + prelude_stream.discard_whitespace(); + auto& token = prelude_stream.consume_a_token(); if (!token.is_token()) { dbgln_if(CSS_PARSER_DEBUG, "CSSParser: @keyframes has invalid prelude, prelude = {}; discarding.", rule.prelude()); return {}; } auto name_token = token.token(); - prelude_stream.skip_whitespace(); + prelude_stream.discard_whitespace(); if (prelude_stream.has_next_token()) { dbgln_if(CSS_PARSER_DEBUG, "CSSParser: @keyframes has invalid prelude, prelude = {}; discarding.", rule.prelude()); @@ -1520,7 +1520,7 @@ JS::GCPtr Parser::convert_to_keyframes_rule(Rule& rule) JS::MarkedVector keyframes(m_context.realm().heap()); while (child_tokens.has_next_token()) { - child_tokens.skip_whitespace(); + child_tokens.discard_whitespace(); // keyframe-selector = | // keyframe-keyword = "from" | "to" // selector = # @@ -1529,10 +1529,10 @@ JS::GCPtr Parser::convert_to_keyframes_rule(Rule& rule) auto selectors = Vector {}; while (child_tokens.has_next_token()) { - child_tokens.skip_whitespace(); + child_tokens.discard_whitespace(); if (!child_tokens.has_next_token()) break; - auto tok = child_tokens.next_token(); + auto tok = child_tokens.consume_a_token(); if 
(!tok.is_token()) { dbgln_if(CSS_PARSER_DEBUG, "CSSParser: @keyframes rule has invalid selector: {}; discarding.", tok.to_debug_string()); child_tokens.reconsume_current_input_token(); @@ -1555,8 +1555,8 @@ JS::GCPtr Parser::convert_to_keyframes_rule(Rule& rule) } if (read_a_selector) { - child_tokens.skip_whitespace(); - if (child_tokens.next_token().is(Token::Type::Comma)) + child_tokens.discard_whitespace(); + if (child_tokens.consume_a_token().is(Token::Type::Comma)) continue; } @@ -1567,8 +1567,8 @@ JS::GCPtr Parser::convert_to_keyframes_rule(Rule& rule) if (!child_tokens.has_next_token()) break; - child_tokens.skip_whitespace(); - auto token = child_tokens.next_token(); + child_tokens.discard_whitespace(); + auto token = child_tokens.consume_a_token(); if (token.is_block()) { auto block_tokens = token.block().values(); auto block_stream = TokenStream { block_tokens }; @@ -1604,25 +1604,25 @@ JS::GCPtr Parser::convert_to_namespace_rule(Rule& rule) } auto tokens = TokenStream { rule.prelude() }; - tokens.skip_whitespace(); + tokens.discard_whitespace(); Optional prefix = {}; - if (tokens.peek_token().is(Token::Type::Ident)) { - prefix = tokens.next_token().token().ident(); - tokens.skip_whitespace(); + if (tokens.next_token().is(Token::Type::Ident)) { + prefix = tokens.consume_a_token().token().ident(); + tokens.discard_whitespace(); } FlyString namespace_uri; if (auto url = parse_url_function(tokens); url.has_value()) { namespace_uri = MUST(url.value().to_string()); - } else if (auto& url_token = tokens.next_token(); url_token.is(Token::Type::String)) { + } else if (auto& url_token = tokens.consume_a_token(); url_token.is(Token::Type::String)) { namespace_uri = url_token.token().string(); } else { - dbgln_if(CSS_PARSER_DEBUG, "Failed to parse @namespace rule: Unable to parse `{}` as URL.", tokens.peek_token().to_debug_string()); + dbgln_if(CSS_PARSER_DEBUG, "Failed to parse @namespace rule: Unable to parse `{}` as URL.", tokens.next_token().to_debug_string()); 
return {}; } - tokens.skip_whitespace(); + tokens.discard_whitespace(); if (tokens.has_next_token()) { if constexpr (CSS_PARSER_DEBUG) { dbgln("Failed to parse @namespace rule: Trailing tokens after URL."); @@ -1739,7 +1739,7 @@ Optional Parser::convert_to_style_property(Declaration const& dec RefPtr Parser::parse_builtin_value(TokenStream& tokens) { auto transaction = tokens.begin_transaction(); - auto& component_value = tokens.next_token(); + auto& component_value = tokens.consume_a_token(); if (component_value.is(Token::Type::Ident)) { auto ident = component_value.token().ident(); if (ident.equals_ignoring_ascii_case("inherit"sv)) { @@ -1771,9 +1771,9 @@ RefPtr Parser::parse_builtin_value(TokenStream& t RefPtr Parser::parse_custom_ident_value(TokenStream& tokens, std::initializer_list blacklist) { auto transaction = tokens.begin_transaction(); - tokens.skip_whitespace(); + tokens.discard_whitespace(); - auto token = tokens.next_token(); + auto token = tokens.consume_a_token(); if (!token.is(Token::Type::Ident)) return nullptr; auto custom_ident = token.token().ident(); @@ -1874,7 +1874,7 @@ Optional Parser::parse_dimension(ComponentValue const& component_valu Optional Parser::parse_angle(TokenStream& tokens) { auto transaction = tokens.begin_transaction(); - auto& token = tokens.next_token(); + auto& token = tokens.consume_a_token(); if (auto dimension = parse_dimension(token); dimension.has_value()) { if (dimension->is_angle()) { @@ -1895,7 +1895,7 @@ Optional Parser::parse_angle(TokenStream& tok Optional Parser::parse_angle_percentage(TokenStream& tokens) { auto transaction = tokens.begin_transaction(); - auto& token = tokens.next_token(); + auto& token = tokens.consume_a_token(); if (auto dimension = parse_dimension(token); dimension.has_value()) { if (dimension->is_angle_percentage()) { @@ -1916,7 +1916,7 @@ Optional Parser::parse_angle_percentage(TokenStream Parser::parse_flex(TokenStream& tokens) { auto transaction = tokens.begin_transaction(); - auto& 
token = tokens.next_token(); + auto& token = tokens.consume_a_token(); if (auto dimension = parse_dimension(token); dimension.has_value()) { if (dimension->is_flex()) { @@ -1937,7 +1937,7 @@ Optional Parser::parse_flex(TokenStream& token Optional Parser::parse_frequency(TokenStream& tokens) { auto transaction = tokens.begin_transaction(); - auto& token = tokens.next_token(); + auto& token = tokens.consume_a_token(); if (auto dimension = parse_dimension(token); dimension.has_value()) { if (dimension->is_frequency()) { @@ -1958,7 +1958,7 @@ Optional Parser::parse_frequency(TokenStream Parser::parse_frequency_percentage(TokenStream& tokens) { auto transaction = tokens.begin_transaction(); - auto& token = tokens.next_token(); + auto& token = tokens.consume_a_token(); if (auto dimension = parse_dimension(token); dimension.has_value()) { if (dimension->is_frequency_percentage()) { @@ -1979,7 +1979,7 @@ Optional Parser::parse_frequency_percentage(TokenStream Parser::parse_integer(TokenStream& tokens) { auto transaction = tokens.begin_transaction(); - auto& token = tokens.next_token(); + auto& token = tokens.consume_a_token(); if (token.is(Token::Type::Number) && token.token().number().is_integer()) { transaction.commit(); @@ -1997,7 +1997,7 @@ Optional Parser::parse_integer(TokenStream& Optional Parser::parse_length(TokenStream& tokens) { auto transaction = tokens.begin_transaction(); - auto& token = tokens.next_token(); + auto& token = tokens.consume_a_token(); if (auto dimension = parse_dimension(token); dimension.has_value()) { if (dimension->is_length()) { @@ -2018,7 +2018,7 @@ Optional Parser::parse_length(TokenStream& t Optional Parser::parse_length_percentage(TokenStream& tokens) { auto transaction = tokens.begin_transaction(); - auto& token = tokens.next_token(); + auto& token = tokens.consume_a_token(); if (auto dimension = parse_dimension(token); dimension.has_value()) { if (dimension->is_length_percentage()) { @@ -2039,7 +2039,7 @@ Optional 
Parser::parse_length_percentage(TokenStream Parser::parse_number(TokenStream& tokens) { auto transaction = tokens.begin_transaction(); - auto& token = tokens.next_token(); + auto& token = tokens.consume_a_token(); if (token.is(Token::Type::Number)) { transaction.commit(); @@ -2057,7 +2057,7 @@ Optional Parser::parse_number(TokenStream& t Optional Parser::parse_resolution(TokenStream& tokens) { auto transaction = tokens.begin_transaction(); - auto& token = tokens.next_token(); + auto& token = tokens.consume_a_token(); if (auto dimension = parse_dimension(token); dimension.has_value()) { if (dimension->is_resolution()) { @@ -2078,7 +2078,7 @@ Optional Parser::parse_resolution(TokenStream Parser::parse_time(TokenStream& tokens) { auto transaction = tokens.begin_transaction(); - auto& token = tokens.next_token(); + auto& token = tokens.consume_a_token(); if (auto dimension = parse_dimension(token); dimension.has_value()) { if (dimension->is_time()) { @@ -2099,7 +2099,7 @@ Optional Parser::parse_time(TokenStream& token Optional Parser::parse_time_percentage(TokenStream& tokens) { auto transaction = tokens.begin_transaction(); - auto& token = tokens.next_token(); + auto& token = tokens.consume_a_token(); if (auto dimension = parse_dimension(token); dimension.has_value()) { if (dimension->is_time_percentage()) { @@ -2119,8 +2119,8 @@ Optional Parser::parse_time_percentage(TokenStream Parser::parse_source_size_value(TokenStream& tokens) { - if (tokens.peek_token().is_ident("auto"sv)) { - (void)tokens.next_token(); // auto + if (tokens.next_token().is_ident("auto"sv)) { + tokens.discard_a_token(); // auto return LengthOrCalculated { Length::make_auto() }; } @@ -2130,7 +2130,7 @@ Optional Parser::parse_source_size_value(TokenStream Parser::parse_ratio(TokenStream& tokens) { auto transaction = tokens.begin_transaction(); - tokens.skip_whitespace(); + tokens.discard_whitespace(); auto read_number_value = [this](ComponentValue const& component_value) -> Optional { if 
(component_value.is(Token::Type::Number)) { @@ -2147,17 +2147,17 @@ Optional Parser::parse_ratio(TokenStream& tokens) }; // ` = [ / ]?` - auto maybe_numerator = read_number_value(tokens.next_token()); + auto maybe_numerator = read_number_value(tokens.consume_a_token()); if (!maybe_numerator.has_value() || maybe_numerator.value() < 0) return {}; auto numerator = maybe_numerator.value(); { auto two_value_transaction = tokens.begin_transaction(); - tokens.skip_whitespace(); - auto solidus = tokens.next_token(); - tokens.skip_whitespace(); - auto maybe_denominator = read_number_value(tokens.next_token()); + tokens.discard_whitespace(); + auto solidus = tokens.consume_a_token(); + tokens.discard_whitespace(); + auto maybe_denominator = read_number_value(tokens.consume_a_token()); if (solidus.is_delim('/') && maybe_denominator.has_value() && maybe_denominator.value() >= 0) { auto denominator = maybe_denominator.value(); @@ -2177,7 +2177,7 @@ Optional Parser::parse_ratio(TokenStream& tokens) Optional Parser::parse_unicode_range(TokenStream& tokens) { auto transaction = tokens.begin_transaction(); - tokens.skip_whitespace(); + tokens.discard_whitespace(); // = // u '+' '?'* | @@ -2210,13 +2210,13 @@ Optional Parser::parse_unicode_range(TokenStream does not start with 'u'"); return {}; } - auto const& second_token = tokens.next_token(); + auto const& second_token = tokens.consume_a_token(); // u '+' '?'* | // u '+' '?'+ @@ -2225,12 +2225,12 @@ Optional Parser::parse_unicode_range(TokenStream Parser::parse_unicode_range(TokenStream Parser::parse_unicode_range(TokenStream Parser::parse_unicode_ranges(TokenStream Parser::parse_dimension_value(TokenStream& tokens) { - if (auto dimension = parse_dimension(tokens.peek_token()); dimension.has_value()) { - (void)tokens.next_token(); // dimension + if (auto dimension = parse_dimension(tokens.next_token()); dimension.has_value()) { + tokens.discard_a_token(); // dimension if (dimension->is_angle()) return 
AngleStyleValue::create(dimension->angle()); @@ -2453,8 +2453,8 @@ RefPtr Parser::parse_dimension_value(TokenStream& VERIFY_NOT_REACHED(); } - if (auto calc = parse_calculated_value(tokens.peek_token()); calc && calc->resolves_to_dimension()) { - (void)tokens.next_token(); // calc + if (auto calc = parse_calculated_value(tokens.next_token()); calc && calc->resolves_to_dimension()) { + tokens.discard_a_token(); // calc return calc; } @@ -2463,13 +2463,13 @@ RefPtr Parser::parse_dimension_value(TokenStream& RefPtr Parser::parse_integer_value(TokenStream& tokens) { - auto peek_token = tokens.peek_token(); + auto peek_token = tokens.next_token(); if (peek_token.is(Token::Type::Number) && peek_token.token().number().is_integer()) { - (void)tokens.next_token(); // integer + tokens.discard_a_token(); // integer return IntegerStyleValue::create(peek_token.token().number().integer_value()); } if (auto calc = parse_calculated_value(peek_token); calc && calc->resolves_to_number()) { - (void)tokens.next_token(); // calc + tokens.discard_a_token(); // calc return calc; } @@ -2478,13 +2478,13 @@ RefPtr Parser::parse_integer_value(TokenStream& t RefPtr Parser::parse_number_value(TokenStream& tokens) { - auto peek_token = tokens.peek_token(); + auto peek_token = tokens.next_token(); if (peek_token.is(Token::Type::Number)) { - (void)tokens.next_token(); // number + tokens.discard_a_token(); // number return NumberStyleValue::create(peek_token.token().number().value()); } if (auto calc = parse_calculated_value(peek_token); calc && calc->resolves_to_number()) { - (void)tokens.next_token(); // calc + tokens.discard_a_token(); // calc return calc; } @@ -2493,17 +2493,17 @@ RefPtr Parser::parse_number_value(TokenStream& to RefPtr Parser::parse_number_percentage_value(TokenStream& tokens) { - auto peek_token = tokens.peek_token(); + auto peek_token = tokens.next_token(); if (peek_token.is(Token::Type::Number)) { - (void)tokens.next_token(); // number + tokens.discard_a_token(); // number 
return NumberStyleValue::create(peek_token.token().number().value()); } if (peek_token.is(Token::Type::Percentage)) { - (void)tokens.next_token(); // percentage + tokens.discard_a_token(); // percentage return PercentageStyleValue::create(Percentage(peek_token.token().percentage())); } if (auto calc = parse_calculated_value(peek_token); calc && calc->resolves_to_number_percentage()) { - (void)tokens.next_token(); // calc + tokens.discard_a_token(); // calc return calc; } @@ -2512,13 +2512,13 @@ RefPtr Parser::parse_number_percentage_value(TokenStream Parser::parse_percentage_value(TokenStream& tokens) { - auto peek_token = tokens.peek_token(); + auto peek_token = tokens.next_token(); if (peek_token.is(Token::Type::Percentage)) { - (void)tokens.next_token(); // percentage + tokens.discard_a_token(); // percentage return PercentageStyleValue::create(Percentage(peek_token.token().percentage())); } if (auto calc = parse_calculated_value(peek_token); calc && calc->resolves_to_percentage()) { - (void)tokens.next_token(); // calc + tokens.discard_a_token(); // calc return calc; } @@ -2657,11 +2657,11 @@ RefPtr Parser::parse_time_percentage_value(TokenStream Parser::parse_keyword_value(TokenStream& tokens) { - auto peek_token = tokens.peek_token(); + auto peek_token = tokens.next_token(); if (peek_token.is(Token::Type::Ident)) { auto keyword = keyword_from_string(peek_token.token().ident()); if (keyword.has_value()) { - (void)tokens.next_token(); // ident + tokens.discard_a_token(); // ident return CSSKeywordValue::create(keyword.value()); } } @@ -2673,7 +2673,7 @@ RefPtr Parser::parse_keyword_value(TokenStream& t RefPtr Parser::parse_rect_value(TokenStream& tokens) { auto transaction = tokens.begin_transaction(); - auto function_token = tokens.next_token(); + auto function_token = tokens.consume_a_token(); if (!function_token.is_function("rect"sv)) return nullptr; @@ -2699,12 +2699,12 @@ RefPtr Parser::parse_rect_value(TokenStream& toke // and specify offsets from the top 
border edge of the box, and , and // specify offsets from the left border edge of the box. for (size_t side = 0; side < 4; side++) { - argument_tokens.skip_whitespace(); + argument_tokens.discard_whitespace(); // , , , and may either have a value or 'auto'. // Negative lengths are permitted. - if (argument_tokens.peek_token().is_ident("auto"sv)) { - (void)argument_tokens.next_token(); // `auto` + if (argument_tokens.next_token().is_ident("auto"sv)) { + (void)argument_tokens.consume_a_token(); // `auto` params.append(Length::make_auto()); } else { auto maybe_length = parse_length(argument_tokens); @@ -2716,7 +2716,7 @@ RefPtr Parser::parse_rect_value(TokenStream& toke } params.append(maybe_length.value().value()); } - argument_tokens.skip_whitespace(); + argument_tokens.discard_whitespace(); // The last side, should be no more tokens following it. if (static_cast(side) == Side::Left) { @@ -2725,7 +2725,7 @@ RefPtr Parser::parse_rect_value(TokenStream& toke break; } - bool next_is_comma = argument_tokens.peek_token().is(Token::Type::Comma); + bool next_is_comma = argument_tokens.next_token().is(Token::Type::Comma); // Authors should separate offset values with commas. 
User agents must support separation // with commas, but may also support separation without commas (but not a combination), @@ -2735,7 +2735,7 @@ RefPtr Parser::parse_rect_value(TokenStream& toke if (comma_requirement == CommaRequirement::RequiresCommas) { if (next_is_comma) - argument_tokens.next_token(); + argument_tokens.discard_a_token(); else return nullptr; } else if (comma_requirement == CommaRequirement::RequiresNoCommas) { @@ -2769,14 +2769,14 @@ RefPtr Parser::parse_solidus_and_alpha_value(TokenStream Parser::parse_rgb_color_value(TokenStream& // TODO: Handle none values auto transaction = outer_tokens.begin_transaction(); - outer_tokens.skip_whitespace(); + outer_tokens.discard_whitespace(); - auto& function_token = outer_tokens.next_token(); + auto& function_token = outer_tokens.consume_a_token(); if (!function_token.is_function("rgb"sv) && !function_token.is_function("rgba"sv)) return {}; @@ -2812,49 +2812,49 @@ RefPtr Parser::parse_rgb_color_value(TokenStream& RefPtr alpha; auto inner_tokens = TokenStream { function_token.function().values() }; - inner_tokens.skip_whitespace(); + inner_tokens.discard_whitespace(); red = parse_number_percentage_value(inner_tokens); if (!red) return {}; - inner_tokens.skip_whitespace(); - bool legacy_syntax = inner_tokens.peek_token().is(Token::Type::Comma); + inner_tokens.discard_whitespace(); + bool legacy_syntax = inner_tokens.next_token().is(Token::Type::Comma); if (legacy_syntax) { // Legacy syntax // #{3} , ? // | #{3} , ? // So, r/g/b can be numbers or percentages, as long as they're all the same type. 
- inner_tokens.next_token(); // comma - inner_tokens.skip_whitespace(); + inner_tokens.discard_a_token(); // comma + inner_tokens.discard_whitespace(); green = parse_number_percentage_value(inner_tokens); if (!green) return {}; - inner_tokens.skip_whitespace(); + inner_tokens.discard_whitespace(); - if (!inner_tokens.next_token().is(Token::Type::Comma)) + if (!inner_tokens.consume_a_token().is(Token::Type::Comma)) return {}; - inner_tokens.skip_whitespace(); + inner_tokens.discard_whitespace(); blue = parse_number_percentage_value(inner_tokens); if (!blue) return {}; - inner_tokens.skip_whitespace(); + inner_tokens.discard_whitespace(); if (inner_tokens.has_next_token()) { // Try and read comma and alpha - if (!inner_tokens.next_token().is(Token::Type::Comma)) + if (!inner_tokens.consume_a_token().is(Token::Type::Comma)) return {}; - inner_tokens.skip_whitespace(); + inner_tokens.discard_whitespace(); alpha = parse_number_percentage_value(inner_tokens); if (!alpha) return {}; - inner_tokens.skip_whitespace(); + inner_tokens.discard_whitespace(); if (inner_tokens.has_next_token()) return {}; @@ -2878,12 +2878,12 @@ RefPtr Parser::parse_rgb_color_value(TokenStream& green = parse_number_percentage_value(inner_tokens); if (!green) return {}; - inner_tokens.skip_whitespace(); + inner_tokens.discard_whitespace(); blue = parse_number_percentage_value(inner_tokens); if (!blue) return {}; - inner_tokens.skip_whitespace(); + inner_tokens.discard_whitespace(); if (inner_tokens.has_next_token()) { alpha = parse_solidus_and_alpha_value(inner_tokens); @@ -2919,9 +2919,9 @@ RefPtr Parser::parse_hsl_color_value(TokenStream& // TODO: Handle none values auto transaction = outer_tokens.begin_transaction(); - outer_tokens.skip_whitespace(); + outer_tokens.discard_whitespace(); - auto& function_token = outer_tokens.next_token(); + auto& function_token = outer_tokens.consume_a_token(); if (!function_token.is_function("hsl"sv) && !function_token.is_function("hsla"sv)) return {}; @@ 
-2931,42 +2931,42 @@ RefPtr Parser::parse_hsl_color_value(TokenStream& RefPtr alpha; auto inner_tokens = TokenStream { function_token.function().values() }; - inner_tokens.skip_whitespace(); + inner_tokens.discard_whitespace(); h = parse_hue_value(inner_tokens); if (!h) return {}; - inner_tokens.skip_whitespace(); - bool legacy_syntax = inner_tokens.peek_token().is(Token::Type::Comma); + inner_tokens.discard_whitespace(); + bool legacy_syntax = inner_tokens.next_token().is(Token::Type::Comma); if (legacy_syntax) { // Legacy syntax // , , , ? - (void)inner_tokens.next_token(); // comma - inner_tokens.skip_whitespace(); + (void)inner_tokens.consume_a_token(); // comma + inner_tokens.discard_whitespace(); s = parse_percentage_value(inner_tokens); if (!s) return {}; - inner_tokens.skip_whitespace(); + inner_tokens.discard_whitespace(); - if (!inner_tokens.next_token().is(Token::Type::Comma)) + if (!inner_tokens.consume_a_token().is(Token::Type::Comma)) return {}; - inner_tokens.skip_whitespace(); + inner_tokens.discard_whitespace(); l = parse_percentage_value(inner_tokens); if (!l) return {}; - inner_tokens.skip_whitespace(); + inner_tokens.discard_whitespace(); if (inner_tokens.has_next_token()) { // Try and read comma and alpha - if (!inner_tokens.next_token().is(Token::Type::Comma)) + if (!inner_tokens.consume_a_token().is(Token::Type::Comma)) return {}; - inner_tokens.skip_whitespace(); + inner_tokens.discard_whitespace(); alpha = parse_number_percentage_value(inner_tokens); - inner_tokens.skip_whitespace(); + inner_tokens.discard_whitespace(); if (inner_tokens.has_next_token()) return {}; @@ -2981,12 +2981,12 @@ RefPtr Parser::parse_hsl_color_value(TokenStream& s = parse_number_percentage_value(inner_tokens); if (!s) return {}; - inner_tokens.skip_whitespace(); + inner_tokens.discard_whitespace(); l = parse_number_percentage_value(inner_tokens); if (!l) return {}; - inner_tokens.skip_whitespace(); + inner_tokens.discard_whitespace(); if 
(inner_tokens.has_next_token()) { alpha = parse_solidus_and_alpha_value(inner_tokens); @@ -3012,9 +3012,9 @@ RefPtr Parser::parse_hwb_color_value(TokenStream& // [ / [ | none] ]? ) auto transaction = outer_tokens.begin_transaction(); - outer_tokens.skip_whitespace(); + outer_tokens.discard_whitespace(); - auto& function_token = outer_tokens.next_token(); + auto& function_token = outer_tokens.consume_a_token(); if (!function_token.is_function("hwb"sv)) return {}; @@ -3024,22 +3024,22 @@ RefPtr Parser::parse_hwb_color_value(TokenStream& RefPtr alpha; auto inner_tokens = TokenStream { function_token.function().values() }; - inner_tokens.skip_whitespace(); + inner_tokens.discard_whitespace(); h = parse_hue_value(inner_tokens); if (!h) return {}; - inner_tokens.skip_whitespace(); + inner_tokens.discard_whitespace(); w = parse_number_percentage_value(inner_tokens); if (!w) return {}; - inner_tokens.skip_whitespace(); + inner_tokens.discard_whitespace(); b = parse_number_percentage_value(inner_tokens); if (!b) return {}; - inner_tokens.skip_whitespace(); + inner_tokens.discard_whitespace(); if (inner_tokens.has_next_token()) { alpha = parse_solidus_and_alpha_value(inner_tokens); @@ -3063,9 +3063,9 @@ RefPtr Parser::parse_oklab_color_value(TokenStream | none] ]? 
) auto transaction = outer_tokens.begin_transaction(); - outer_tokens.skip_whitespace(); + outer_tokens.discard_whitespace(); - auto& function_token = outer_tokens.next_token(); + auto& function_token = outer_tokens.consume_a_token(); if (!function_token.is_function("oklab"sv)) return {}; @@ -3075,22 +3075,22 @@ RefPtr Parser::parse_oklab_color_value(TokenStream alpha; auto inner_tokens = TokenStream { function_token.function().values() }; - inner_tokens.skip_whitespace(); + inner_tokens.discard_whitespace(); l = parse_number_percentage_value(inner_tokens); if (!l) return {}; - inner_tokens.skip_whitespace(); + inner_tokens.discard_whitespace(); a = parse_number_percentage_value(inner_tokens); if (!a) return {}; - inner_tokens.skip_whitespace(); + inner_tokens.discard_whitespace(); b = parse_number_percentage_value(inner_tokens); if (!b) return {}; - inner_tokens.skip_whitespace(); + inner_tokens.discard_whitespace(); if (inner_tokens.has_next_token()) { alpha = parse_solidus_and_alpha_value(inner_tokens); @@ -3114,9 +3114,9 @@ RefPtr Parser::parse_oklch_color_value(TokenStream | none] ]? 
) auto transaction = outer_tokens.begin_transaction(); - outer_tokens.skip_whitespace(); + outer_tokens.discard_whitespace(); - auto& function_token = outer_tokens.next_token(); + auto& function_token = outer_tokens.consume_a_token(); if (!function_token.is_function("oklch"sv)) return {}; @@ -3126,22 +3126,22 @@ RefPtr Parser::parse_oklch_color_value(TokenStream alpha; auto inner_tokens = TokenStream { function_token.function().values() }; - inner_tokens.skip_whitespace(); + inner_tokens.discard_whitespace(); l = parse_number_percentage_value(inner_tokens); if (!l) return {}; - inner_tokens.skip_whitespace(); + inner_tokens.discard_whitespace(); c = parse_number_percentage_value(inner_tokens); if (!c) return {}; - inner_tokens.skip_whitespace(); + inner_tokens.discard_whitespace(); h = parse_hue_value(inner_tokens); if (!h) return {}; - inner_tokens.skip_whitespace(); + inner_tokens.discard_whitespace(); if (inner_tokens.has_next_token()) { alpha = parse_solidus_and_alpha_value(inner_tokens); @@ -3182,8 +3182,8 @@ RefPtr Parser::parse_color_value(TokenStream& tok return oklch; auto transaction = tokens.begin_transaction(); - tokens.skip_whitespace(); - auto component_value = tokens.next_token(); + tokens.discard_whitespace(); + auto component_value = tokens.consume_a_token(); if (component_value.is(Token::Type::Ident)) { auto ident = component_value.token().ident(); @@ -3280,13 +3280,13 @@ RefPtr Parser::parse_counter_value(TokenStream& t // their name as a . A name cannot match the keyword none; // such an identifier is invalid as a . 
auto transaction = tokens.begin_transaction(); - tokens.skip_whitespace(); + tokens.discard_whitespace(); auto counter_name = parse_custom_ident_value(tokens, { "none"sv }); if (!counter_name) return {}; - tokens.skip_whitespace(); + tokens.discard_whitespace(); if (tokens.has_next_token()) return {}; @@ -3301,13 +3301,13 @@ RefPtr Parser::parse_counter_value(TokenStream& t // https://drafts.csswg.org/css-counter-styles-3/#typedef-counter-style-name // is a that is not an ASCII case-insensitive match for none. auto transaction = tokens.begin_transaction(); - tokens.skip_whitespace(); + tokens.discard_whitespace(); auto counter_style_name = parse_custom_ident_value(tokens, { "none"sv }); if (!counter_style_name) return {}; - tokens.skip_whitespace(); + tokens.discard_whitespace(); if (tokens.has_next_token()) return {}; @@ -3316,7 +3316,7 @@ RefPtr Parser::parse_counter_value(TokenStream& t }; auto transaction = tokens.begin_transaction(); - auto token = tokens.next_token(); + auto token = tokens.consume_a_token(); if (token.is_function("counter"sv)) { // counter() = counter( , ? 
) auto& function = token.function(); @@ -3359,9 +3359,9 @@ RefPtr Parser::parse_counter_value(TokenStream& t return nullptr; TokenStream string_tokens { function_values[1] }; - string_tokens.skip_whitespace(); + string_tokens.discard_whitespace(); auto join_string = parse_string_value(string_tokens); - string_tokens.skip_whitespace(); + string_tokens.discard_whitespace(); if (!join_string || string_tokens.has_next_token()) return nullptr; @@ -3394,7 +3394,7 @@ RefPtr Parser::parse_counter_definitions_value(TokenStream counter_definitions; while (tokens.has_next_token()) { @@ -3402,17 +3402,17 @@ RefPtr Parser::parse_counter_definitions_value(TokenStream | - auto& token = tokens.next_token(); + auto& token = tokens.consume_a_token(); if (token.is(Token::Type::Ident)) { definition.name = token.token().ident(); definition.is_reversed = false; } else if (allow_reversed == AllowReversed::Yes && token.is_function("reversed"sv)) { TokenStream function_tokens { token.function().values() }; - function_tokens.skip_whitespace(); - auto& name_token = function_tokens.next_token(); + function_tokens.discard_whitespace(); + auto& name_token = function_tokens.consume_a_token(); if (!name_token.is(Token::Type::Ident)) break; - function_tokens.skip_whitespace(); + function_tokens.discard_whitespace(); if (function_tokens.has_next_token()) break; @@ -3421,7 +3421,7 @@ RefPtr Parser::parse_counter_definitions_value(TokenStream? 
definition.value = parse_integer_value(tokens); @@ -3429,7 +3429,7 @@ RefPtr Parser::parse_counter_definitions_value(TokenStream Parser::parse_ratio_value(TokenStream& tok RefPtr Parser::parse_string_value(TokenStream& tokens) { - auto peek = tokens.peek_token(); + auto peek = tokens.next_token(); if (peek.is(Token::Type::String)) { - (void)tokens.next_token(); + tokens.discard_a_token(); return StringStyleValue::create(peek.token().string()); } @@ -3485,13 +3485,13 @@ RefPtr Parser::parse_paint_value(TokenStream& tok return color; // NOTE: also accepts identifiers, so we do this identifier check last. - if (tokens.peek_token().is(Token::Type::Ident)) { - auto maybe_keyword = keyword_from_string(tokens.peek_token().token().ident()); + if (tokens.next_token().is(Token::Type::Ident)) { + auto maybe_keyword = keyword_from_string(tokens.next_token().token().ident()); if (maybe_keyword.has_value()) { // FIXME: Accept `context-fill` and `context-stroke` switch (*maybe_keyword) { case Keyword::None: - (void)tokens.next_token(); + tokens.discard_a_token(); return CSSKeywordValue::create(*maybe_keyword); default: return nullptr; @@ -3507,7 +3507,7 @@ RefPtr Parser::parse_paint_value(TokenStream& tok return *color_or_none; if (auto url = parse_url_value(tokens)) { - tokens.skip_whitespace(); + tokens.discard_whitespace(); if (auto color_or_none = parse_color_or_none(); color_or_none == nullptr) { // Fail to parse if the fallback is invalid, but otherwise ignore it. 
// FIXME: Use fallback color @@ -3593,8 +3593,8 @@ RefPtr Parser::parse_position_value(TokenStream RefPtr { auto transaction = tokens.begin_transaction(); - tokens.skip_whitespace(); - auto const& token = tokens.next_token(); + tokens.discard_whitespace(); + auto const& token = tokens.consume_a_token(); // [ left | center | right | top | bottom ] if (auto maybe_edge = parse_position_edge(token); maybe_edge.has_value()) { @@ -3627,17 +3627,17 @@ RefPtr Parser::parse_position_value(TokenStream RefPtr { auto transaction = tokens.begin_transaction(); - tokens.skip_whitespace(); + tokens.discard_whitespace(); // Parse out two position edges - auto maybe_first_edge = parse_position_edge(tokens.next_token()); + auto maybe_first_edge = parse_position_edge(tokens.consume_a_token()); if (!maybe_first_edge.has_value()) return nullptr; auto first_edge = maybe_first_edge.release_value(); - tokens.skip_whitespace(); + tokens.discard_whitespace(); - auto maybe_second_edge = parse_position_edge(tokens.next_token()); + auto maybe_second_edge = parse_position_edge(tokens.consume_a_token()); if (!maybe_second_edge.has_value()) return nullptr; @@ -3664,8 +3664,8 @@ RefPtr Parser::parse_position_value(TokenStream RefPtr { - tokens.skip_whitespace(); - auto const& token = tokens.next_token(); + tokens.discard_whitespace(); + auto const& token = tokens.consume_a_token(); if (auto maybe_position = parse_position_edge(token); maybe_position.has_value()) { auto position = maybe_position.release_value(); @@ -3705,15 +3705,15 @@ RefPtr Parser::parse_position_value(TokenStream Optional { - tokens.skip_whitespace(); + tokens.discard_whitespace(); - auto maybe_position = parse_position_edge(tokens.next_token()); + auto maybe_position = parse_position_edge(tokens.consume_a_token()); if (!maybe_position.has_value()) return {}; - tokens.skip_whitespace(); + tokens.discard_whitespace(); - auto maybe_length = parse_length_percentage(tokens.next_token()); + auto maybe_length = 
parse_length_percentage(tokens.consume_a_token()); if (!maybe_length.has_value()) return {}; @@ -3764,20 +3764,20 @@ RefPtr Parser::parse_position_value(TokenStream ? ] auto parse_position_and_maybe_length = [&]() -> Optional { - tokens.skip_whitespace(); + tokens.discard_whitespace(); - auto maybe_position = parse_position_edge(tokens.next_token()); + auto maybe_position = parse_position_edge(tokens.consume_a_token()); if (!maybe_position.has_value()) return {}; - tokens.skip_whitespace(); + tokens.discard_whitespace(); - auto maybe_length = parse_length_percentage(tokens.peek_token()); + auto maybe_length = parse_length_percentage(tokens.next_token()); if (maybe_length.has_value()) { // 'center' cannot be followed by a if (maybe_position.value() == PositionEdge::Center && maybe_length.has_value()) return {}; - tokens.next_token(); + tokens.discard_a_token(); } return PositionAndMaybeLength { @@ -3853,7 +3853,7 @@ RefPtr Parser::parse_comma_separated_value_list(TokenStream Parser::parse_simple_comma_separated_value_list(PropertyID RefPtr Parser::parse_all_as_single_keyword_value(TokenStream& tokens, Keyword keyword) { auto transaction = tokens.begin_transaction(); - tokens.skip_whitespace(); + tokens.discard_whitespace(); auto keyword_value = parse_keyword_value(tokens); - tokens.skip_whitespace(); + tokens.discard_whitespace(); if (tokens.has_next_token() || !keyword_value || keyword_value->to_keyword() != keyword) return {}; @@ -4040,12 +4040,12 @@ RefPtr Parser::parse_background_value(TokenStream }; while (tokens.has_next_token()) { - if (tokens.peek_token().is(Token::Type::Comma)) { + if (tokens.next_token().is(Token::Type::Comma)) { has_multiple_layers = true; if (!background_layer_is_valid(false)) return nullptr; complete_background_layer(); - (void)tokens.next_token(); + tokens.discard_a_token(); continue; } @@ -4091,7 +4091,7 @@ RefPtr Parser::parse_background_value(TokenStream // Attempt to parse `/ ` auto background_size_transaction = 
tokens.begin_transaction(); - auto& maybe_slash = tokens.next_token(); + auto& maybe_slash = tokens.consume_a_token(); if (maybe_slash.is_delim('/')) { if (auto maybe_background_size = parse_single_background_size_value(tokens)) { background_size_transaction.commit(); @@ -4491,12 +4491,12 @@ RefPtr Parser::parse_border_radius_shorthand_value(TokenStream Parser::parse_single_shadow_value(TokenStream Parser::parse_single_shadow_value(TokenStream Parser::parse_content_value(TokenStream& t bool in_alt_text = false; while (tokens.has_next_token()) { - auto& next = tokens.peek_token(); + auto& next = tokens.next_token(); if (next.is_delim('/')) { if (in_alt_text || content_values.is_empty()) return nullptr; in_alt_text = true; - (void)tokens.next_token(); + tokens.discard_a_token(); continue; } @@ -4914,7 +4914,7 @@ RefPtr Parser::parse_display_value(TokenStream& t } // Not a display value, abort. - dbgln_if(CSS_PARSER_DEBUG, "Unrecognized display value: `{}`", tokens.peek_token().to_string()); + dbgln_if(CSS_PARSER_DEBUG, "Unrecognized display value: `{}`", tokens.next_token().to_string()); return {}; } @@ -5003,10 +5003,10 @@ RefPtr Parser::parse_filter_value_list_value(TokenStream Optional { TokenStream tokens { function_values }; - tokens.skip_whitespace(); + tokens.discard_whitespace(); auto if_no_more_tokens_return = [&](auto filter) -> Optional { - tokens.skip_whitespace(); + tokens.discard_whitespace(); if (tokens.has_next_token()) return {}; return filter; @@ -5017,7 +5017,7 @@ RefPtr Parser::parse_filter_value_list_value(TokenStream Parser::parse_filter_value_list_value(TokenStream maybe_radius = {}; auto maybe_color = parse_color_value(tokens); auto x_offset = parse_length(tokens); - tokens.skip_whitespace(); + tokens.discard_whitespace(); if (!x_offset.has_value() || !tokens.has_next_token()) { return {}; } @@ -5042,7 +5042,7 @@ RefPtr Parser::parse_filter_value_list_value(TokenStream Parser::parse_filter_value_list_value(TokenStream | ]? 
) if (!tokens.has_next_token()) return Filter::HueRotate {}; - auto& token = tokens.next_token(); + auto& token = tokens.consume_a_token(); if (token.is(Token::Type::Number)) { // hue-rotate(0) auto number = token.token().number(); @@ -5083,7 +5083,7 @@ RefPtr Parser::parse_filter_value_list_value(TokenStream? ) if (!tokens.has_next_token()) return Filter::Color { filter_token_to_operation(filter_token) }; - auto amount = parse_number_percentage(tokens.next_token()); + auto amount = parse_number_percentage(tokens.consume_a_token()); if (!amount.has_value()) return {}; return if_no_more_tokens_return(Filter::Color { filter_token_to_operation(filter_token), *amount }); @@ -5093,10 +5093,10 @@ RefPtr Parser::parse_filter_value_list_value(TokenStream filter_value_list {}; while (tokens.has_next_token()) { - tokens.skip_whitespace(); + tokens.discard_whitespace(); if (!tokens.has_next_token()) break; - auto& token = tokens.next_token(); + auto& token = tokens.consume_a_token(); if (!token.is_function()) return nullptr; auto filter_token = parse_filter_function_name(token.function().name()); @@ -5289,10 +5289,10 @@ RefPtr Parser::parse_font_value(TokenStream& toke auto transaction = tokens.begin_transaction(); while (tokens.has_next_token()) { - auto& peek_token = tokens.peek_token(); + auto& peek_token = tokens.next_token(); if (peek_token.is_ident("normal"sv)) { normal_count++; - (void)tokens.next_token(); + tokens.discard_a_token(); continue; } @@ -5308,8 +5308,8 @@ RefPtr Parser::parse_font_value(TokenStream& toke font_size = value.release_nonnull(); // Consume `/ line-height` if present - if (tokens.peek_token().is_delim('/')) { - (void)tokens.next_token(); + if (tokens.next_token().is_delim('/')) { + tokens.discard_a_token(); auto maybe_line_height = parse_css_value_for_property(PropertyID::LineHeight, tokens); if (!maybe_line_height) return nullptr; @@ -5381,7 +5381,7 @@ RefPtr Parser::parse_font_value(TokenStream& toke RefPtr 
Parser::parse_font_family_value(TokenStream& tokens) { auto next_is_comma_or_eof = [&]() -> bool { - return !tokens.has_next_token() || tokens.peek_token().is(Token::Type::Comma); + return !tokens.has_next_token() || tokens.next_token().is(Token::Type::Comma); }; // Note: Font-family names can either be a quoted string, or a keyword, or a series of custom-idents. @@ -5391,17 +5391,17 @@ RefPtr Parser::parse_font_family_value(TokenStream current_name_parts; while (tokens.has_next_token()) { - auto const& peek = tokens.peek_token(); + auto const& peek = tokens.next_token(); if (peek.is(Token::Type::String)) { // `font-family: my cool "font";` is invalid. if (!current_name_parts.is_empty()) return nullptr; - (void)tokens.next_token(); // String + tokens.discard_a_token(); // String if (!next_is_comma_or_eof()) return nullptr; font_families.append(StringStyleValue::create(peek.token().string())); - (void)tokens.next_token(); // Comma + tokens.discard_a_token(); // Comma continue; } @@ -5417,21 +5417,21 @@ RefPtr Parser::parse_font_family_value(TokenStream Parser::parse_font_language_override_value(TokenStreamstring_value(); - tokens.skip_whitespace(); + tokens.discard_whitespace(); if (tokens.has_next_token()) { dbgln_if(CSS_PARSER_DEBUG, "CSSParser: Failed to parse font-language-override: unexpected trailing tokens"); return nullptr; @@ -5509,9 +5509,9 @@ RefPtr Parser::parse_font_feature_settings_value(TokenStream = [ | on | off ]? 
TokenStream tag_tokens { values }; - tag_tokens.skip_whitespace(); + tag_tokens.discard_whitespace(); auto opentype_tag = parse_opentype_tag_value(tag_tokens); - tag_tokens.skip_whitespace(); + tag_tokens.discard_whitespace(); RefPtr value; if (tag_tokens.has_next_token()) { if (auto integer = parse_integer_value(tag_tokens)) { @@ -5534,7 +5534,7 @@ RefPtr Parser::parse_font_feature_settings_value(TokenStream Parser::parse_font_variation_settings_value(TokenStream> axis_tags_map; for (auto const& values : tag_values) { TokenStream tag_tokens { values }; - tag_tokens.skip_whitespace(); + tag_tokens.discard_whitespace(); auto opentype_tag = parse_opentype_tag_value(tag_tokens); - tag_tokens.skip_whitespace(); + tag_tokens.discard_whitespace(); auto number = parse_number_value(tag_tokens); - tag_tokens.skip_whitespace(); + tag_tokens.discard_whitespace(); if (!opentype_tag || !number || tag_tokens.has_next_token()) return nullptr; @@ -5630,7 +5630,7 @@ JS::GCPtr Parser::parse_font_face_rule(TokenStream TokenStream tokens { values }; if (auto percentage_value = parse_percentage_value(tokens)) { - tokens.skip_whitespace(); + tokens.discard_whitespace(); if (tokens.has_next_token()) return Error::from_string_literal("Unexpected trailing tokens"); @@ -5647,10 +5647,10 @@ JS::GCPtr Parser::parse_font_face_rule(TokenStream`"); - tokens.skip_whitespace(); + tokens.discard_whitespace(); if (tokens.has_next_token()) return Error::from_string_literal("Unexpected trailing tokens"); @@ -5685,7 +5685,7 @@ JS::GCPtr Parser::parse_font_face_rule(TokenStream Parser::parse_font_face_rule(TokenStream TokenStream token_stream { declaration.values() }; - token_stream.skip_whitespace(); - auto& token = token_stream.next_token(); - token_stream.skip_whitespace(); + token_stream.discard_whitespace(); + auto& token = token_stream.consume_a_token(); + token_stream.discard_whitespace(); if (token_stream.has_next_token()) { dbgln_if(CSS_PARSER_DEBUG, "CSSParser: Unexpected trailing tokens in 
font-named-instance"); continue; @@ -5929,7 +5929,7 @@ Vector Parser::parse_font_face_src(TokenStream& compo auto list_of_source_token_lists = parse_a_comma_separated_list_of_component_values(component_values); for (auto const& source_token_list : list_of_source_token_lists) { TokenStream source_tokens { source_token_list }; - source_tokens.skip_whitespace(); + source_tokens.discard_whitespace(); // [ format()]? // FIXME: Implement optional tech() function from CSS-Fonts-4. @@ -5941,13 +5941,13 @@ Vector Parser::parse_font_face_src(TokenStream& compo Optional format; - source_tokens.skip_whitespace(); + source_tokens.discard_whitespace(); if (!source_tokens.has_next_token()) { supported_sources.empend(move(url), format); continue; } - auto maybe_function = source_tokens.next_token(); + auto maybe_function = source_tokens.consume_a_token(); if (!maybe_function.is_function()) { dbgln_if(CSS_PARSER_DEBUG, "CSSParser: @font-face src invalid (token after `url()` that isn't a function: {}); discarding.", maybe_function.to_debug_string()); return {}; @@ -5956,8 +5956,8 @@ Vector Parser::parse_font_face_src(TokenStream& compo auto const& function = maybe_function.function(); if (function.name().equals_ignoring_ascii_case("format"sv)) { TokenStream format_tokens { function.values() }; - format_tokens.skip_whitespace(); - auto const& format_name_token = format_tokens.next_token(); + format_tokens.discard_whitespace(); + auto const& format_name_token = format_tokens.consume_a_token(); StringView format_name; if (format_name_token.is(Token::Type::Ident)) { format_name = format_name_token.token().ident(); @@ -5979,9 +5979,9 @@ Vector Parser::parse_font_face_src(TokenStream& compo return {}; } - source_tokens.skip_whitespace(); + source_tokens.discard_whitespace(); if (source_tokens.has_next_token()) { - dbgln_if(CSS_PARSER_DEBUG, "CSSParser: @font-face src invalid (extra token `{}`); discarding.", source_tokens.peek_token().to_debug_string()); + dbgln_if(CSS_PARSER_DEBUG, 
"CSSParser: @font-face src invalid (extra token `{}`); discarding.", source_tokens.next_token().to_debug_string()); return {}; } @@ -5989,7 +5989,7 @@ Vector Parser::parse_font_face_src(TokenStream& compo continue; } - auto const& first = source_tokens.next_token(); + auto const& first = source_tokens.consume_a_token(); if (first.is_function("local"sv)) { if (first.function().values().is_empty()) { continue; @@ -6016,8 +6016,8 @@ RefPtr Parser::parse_list_style_value(TokenStream auto transaction = tokens.begin_transaction(); while (tokens.has_next_token()) { - if (auto peek = tokens.peek_token(); peek.is_ident("none"sv)) { - (void)tokens.next_token(); + if (auto peek = tokens.next_token(); peek.is_ident("none"sv)) { + tokens.discard_a_token(); found_nones++; continue; } @@ -6088,7 +6088,7 @@ RefPtr Parser::parse_math_depth_value(TokenStream // auto-add | add() | auto transaction = tokens.begin_transaction(); - auto token = tokens.next_token(); + auto token = tokens.consume_a_token(); if (tokens.has_next_token()) return nullptr; @@ -6110,9 +6110,9 @@ RefPtr Parser::parse_math_depth_value(TokenStream // add() if (token.is_function("add"sv)) { auto add_tokens = TokenStream { token.function().values() }; - add_tokens.skip_whitespace(); - auto integer_token = add_tokens.next_token(); - add_tokens.skip_whitespace(); + add_tokens.discard_whitespace(); + auto integer_token = add_tokens.consume_a_token(); + add_tokens.discard_whitespace(); if (add_tokens.has_next_token()) return nullptr; if (auto integer_value = parse_something_that_resolves_to_integer(integer_token)) { @@ -6352,9 +6352,9 @@ RefPtr Parser::parse_easing_value(TokenStream& to { auto transaction = tokens.begin_transaction(); - tokens.skip_whitespace(); + tokens.discard_whitespace(); - auto const& part = tokens.next_token(); + auto const& part = tokens.consume_a_token(); if (part.is(Token::Type::Ident)) { auto name = part.token().ident(); @@ -6533,7 +6533,7 @@ RefPtr Parser::parse_transform_value(TokenStream& 
StyleValueVector transformations; auto transaction = tokens.begin_transaction(); while (tokens.has_next_token()) { - auto const& part = tokens.next_token(); + auto const& part = tokens.consume_a_token(); if (!part.is_function()) return nullptr; auto maybe_function = transform_function_from_string(part.function().name()); @@ -6558,9 +6558,9 @@ RefPtr Parser::parse_transform_value(TokenStream& StyleValueVector values; for (auto argument_index = 0u; argument_index < arguments.size(); ++argument_index) { TokenStream argument_tokens { arguments[argument_index] }; - argument_tokens.skip_whitespace(); + argument_tokens.discard_whitespace(); - auto const& value = argument_tokens.next_token(); + auto const& value = argument_tokens.consume_a_token(); RefPtr maybe_calc_value = parse_calculated_value(value); switch (function_metadata.parameters[argument_index].type) { @@ -6583,7 +6583,7 @@ RefPtr Parser::parse_transform_value(TokenStream& case TransformFunctionParameterType::Length: case TransformFunctionParameterType::LengthNone: { if (maybe_calc_value && maybe_calc_value->resolves_to_length()) { - (void)argument_tokens.next_token(); // calc() + argument_tokens.discard_a_token(); // calc() values.append(maybe_calc_value.release_nonnull()); } else { // FIXME: Remove this reconsume once all parsing functions are TokenStream-based. 
@@ -6652,7 +6652,7 @@ RefPtr Parser::parse_transform_value(TokenStream& } } - argument_tokens.skip_whitespace(); + argument_tokens.discard_whitespace(); if (argument_tokens.has_next_token()) return nullptr; } @@ -6789,7 +6789,7 @@ RefPtr Parser::parse_transition_value(TokenStream TransitionStyleValue::Transition transition; auto time_value_count = 0; - while (tokens.has_next_token() && !tokens.peek_token().is(Token::Type::Comma)) { + while (tokens.has_next_token() && !tokens.next_token().is(Token::Type::Comma)) { if (auto time = parse_time(tokens); time.has_value()) { switch (time_value_count) { case 0: @@ -6816,20 +6816,20 @@ RefPtr Parser::parse_transition_value(TokenStream continue; } - if (tokens.peek_token().is(Token::Type::Ident)) { + if (tokens.next_token().is(Token::Type::Ident)) { if (transition.property_name) { dbgln_if(CSS_PARSER_DEBUG, "Transition property has multiple property identifiers"); return {}; } - auto ident = tokens.next_token().token().ident(); + auto ident = tokens.consume_a_token().token().ident(); if (auto property = property_id_from_string(ident); property.has_value()) transition.property_name = CustomIdentStyleValue::create(ident); continue; } - dbgln_if(CSS_PARSER_DEBUG, "Transition property has unexpected token \"{}\"", tokens.peek_token().to_string()); + dbgln_if(CSS_PARSER_DEBUG, "Transition property has unexpected token \"{}\"", tokens.next_token().to_string()); return {}; } @@ -6841,10 +6841,10 @@ RefPtr Parser::parse_transition_value(TokenStream transitions.append(move(transition)); - if (!tokens.peek_token().is(Token::Type::Comma)) + if (!tokens.next_token().is(Token::Type::Comma)) break; - tokens.next_token(); + tokens.discard_a_token(); } transaction.commit(); @@ -6898,7 +6898,7 @@ Optional Parser::parse_fit_content(Vector c // equal to a min-content minimum), and limit is the track sizing function passed as an argument to fit-content(). 
// This is essentially calculated as the smaller of minmax(auto, max-content) and minmax(auto, limit). auto function_tokens = TokenStream(component_values); - function_tokens.skip_whitespace(); + function_tokens.discard_whitespace(); auto maybe_length_percentage = parse_length_percentage(function_tokens); if (maybe_length_percentage.has_value()) return CSS::GridFitContent(CSS::GridSize(CSS::GridSize::Type::FitContent, maybe_length_percentage.value())); @@ -6918,17 +6918,17 @@ Optional Parser::parse_min_max(Vector const& co return {}; TokenStream part_one_tokens { comma_separated_list[0] }; - part_one_tokens.skip_whitespace(); + part_one_tokens.discard_whitespace(); if (!part_one_tokens.has_next_token()) return {}; - auto current_token = part_one_tokens.next_token(); + auto current_token = part_one_tokens.consume_a_token(); auto min_grid_size = parse_grid_size(current_token); TokenStream part_two_tokens { comma_separated_list[1] }; - part_two_tokens.skip_whitespace(); + part_two_tokens.discard_whitespace(); if (!part_two_tokens.has_next_token()) return {}; - current_token = part_two_tokens.next_token(); + current_token = part_two_tokens.consume_a_token(); auto max_grid_size = parse_grid_size(current_token); if (min_grid_size.has_value() && max_grid_size.has_value()) { @@ -6955,10 +6955,10 @@ Optional Parser::parse_repeat(Vector const& com return {}; // The first argument specifies the number of repetitions. 
TokenStream part_one_tokens { comma_separated_list[0] }; - part_one_tokens.skip_whitespace(); + part_one_tokens.discard_whitespace(); if (!part_one_tokens.has_next_token()) return {}; - auto& current_token = part_one_tokens.next_token(); + auto& current_token = part_one_tokens.consume_a_token(); auto repeat_count = 0; if (current_token.is(Token::Type::Number) && current_token.token().number().is_integer() && current_token.token().number_value() > 0) @@ -6970,14 +6970,14 @@ Optional Parser::parse_repeat(Vector const& com // The second argument is a track list, which is repeated that number of times. TokenStream part_two_tokens { comma_separated_list[1] }; - part_two_tokens.skip_whitespace(); + part_two_tokens.discard_whitespace(); if (!part_two_tokens.has_next_token()) return {}; Vector> repeat_params; auto last_object_was_line_names = false; while (part_two_tokens.has_next_token()) { - auto token = part_two_tokens.next_token(); + auto token = part_two_tokens.consume_a_token(); Vector line_names; if (token.is_block()) { if (last_object_was_line_names) @@ -6987,12 +6987,12 @@ Optional Parser::parse_repeat(Vector const& com return {}; TokenStream block_tokens { token.block().values() }; while (block_tokens.has_next_token()) { - auto current_block_token = block_tokens.next_token(); + auto current_block_token = block_tokens.consume_a_token(); line_names.append(current_block_token.token().ident().to_string()); - block_tokens.skip_whitespace(); + block_tokens.discard_whitespace(); } repeat_params.append(GridLineNames { move(line_names) }); - part_two_tokens.skip_whitespace(); + part_two_tokens.discard_whitespace(); } else { last_object_was_line_names = false; auto track_sizing_function = parse_track_sizing_function(token); @@ -7015,7 +7015,7 @@ Optional Parser::parse_repeat(Vector const& com return {}; repeat_params.append(track_sizing_function.value()); - part_two_tokens.skip_whitespace(); + part_two_tokens.discard_whitespace(); } } @@ -7093,7 +7093,7 @@ RefPtr 
Parser::parse_grid_track_size_list(TokenStream> track_list; auto last_object_was_line_names = false; while (tokens.has_next_token()) { - auto token = tokens.next_token(); + auto token = tokens.consume_a_token(); if (token.is_block()) { if (last_object_was_line_names && !allow_separate_line_name_blocks) { transaction.commit(); @@ -7106,11 +7106,11 @@ RefPtr Parser::parse_grid_track_size_list(TokenStream Parser::parse_grid_auto_flow_value(TokenStream Optional { auto transaction = tokens.begin_transaction(); - auto token = tokens.next_token(); + auto token = tokens.consume_a_token(); if (!token.is(Token::Type::Ident)) return {}; auto const& ident = token.token().ident(); @@ -7157,7 +7157,7 @@ RefPtr Parser::parse_grid_auto_flow_value(TokenStream Optional { auto transaction = tokens.begin_transaction(); - auto token = tokens.next_token(); + auto token = tokens.consume_a_token(); if (!token.is(Token::Type::Ident)) return {}; auto const& ident = token.token().ident(); @@ -7194,7 +7194,7 @@ RefPtr Parser::parse_scrollbar_gutter_value(TokenStream Optional { auto transaction = tokens.begin_transaction(); - auto token = tokens.next_token(); + auto token = tokens.consume_a_token(); if (!token.is(Token::Type::Ident)) return {}; auto const& ident = token.token().ident(); @@ -7210,7 +7210,7 @@ RefPtr Parser::parse_scrollbar_gutter_value(TokenStream Optional { auto transaction = tokens.begin_transaction(); - auto token = tokens.next_token(); + auto token = tokens.consume_a_token(); if (!token.is(Token::Type::Ident)) return {}; auto const& ident = token.token().ident(); @@ -7254,7 +7254,7 @@ RefPtr Parser::parse_grid_auto_track_sizes(TokenStream> track_list; auto transaction = tokens.begin_transaction(); while (tokens.has_next_token()) { - auto token = tokens.next_token(); + auto token = tokens.consume_a_token(); auto track_sizing_function = parse_track_sizing_function(token); if (!track_sizing_function.has_value()) { transaction.commit(); @@ -7301,7 +7301,7 @@ RefPtr 
Parser::parse_grid_track_placement(TokenStr transaction.commit(); return GridTrackPlacementStyleValue::create(GridTrackPlacement::make_line({}, custom_ident->custom_ident().to_string())); } - auto& token = tokens.next_token(); + auto& token = tokens.consume_a_token(); if (auto maybe_calculated = parse_calculated_value(token); maybe_calculated && maybe_calculated->resolves_to_number()) { transaction.commit(); return GridTrackPlacementStyleValue::create(GridTrackPlacement::make_line(static_cast(maybe_calculated->resolve_integer().value()), {})); @@ -7325,20 +7325,20 @@ RefPtr Parser::parse_grid_track_placement(TokenStr auto span_or_position_value = 0; String identifier_value; while (tokens.has_next_token()) { - auto& token = tokens.peek_token(); + auto& token = tokens.next_token(); if (token.is_ident("auto"sv)) return nullptr; if (token.is_ident("span"sv)) { if (span_value) return nullptr; - (void)tokens.next_token(); // span + tokens.discard_a_token(); // span span_value = true; continue; } if (is_valid_integer(token)) { if (span_or_position_value != 0) return nullptr; - span_or_position_value = static_cast(tokens.next_token().token().to_integer()); + span_or_position_value = static_cast(tokens.consume_a_token().token().to_integer()); continue; } if (auto custom_ident = parse_custom_ident(tokens)) { @@ -7370,7 +7370,7 @@ RefPtr Parser::parse_grid_track_placement_shorthand_value(Propert auto end_property = (property_id == PropertyID::GridColumn) ? 
PropertyID::GridColumnEnd : PropertyID::GridRowEnd; auto transaction = tokens.begin_transaction(); - auto current_token = tokens.next_token(); + auto current_token = tokens.consume_a_token(); Vector track_start_placement_tokens; while (true) { @@ -7379,17 +7379,17 @@ RefPtr Parser::parse_grid_track_placement_shorthand_value(Propert track_start_placement_tokens.append(current_token); if (!tokens.has_next_token()) break; - current_token = tokens.next_token(); + current_token = tokens.consume_a_token(); } Vector track_end_placement_tokens; if (tokens.has_next_token()) { - current_token = tokens.next_token(); + current_token = tokens.consume_a_token(); while (true) { track_end_placement_tokens.append(current_token); if (!tokens.has_next_token()) break; - current_token = tokens.next_token(); + current_token = tokens.consume_a_token(); } } @@ -7443,7 +7443,7 @@ RefPtr Parser::parse_grid_track_size_list_shorthand_value(Propert bool found_forward_slash = false; while (tokens.has_next_token()) { - auto& token = tokens.next_token(); + auto& token = tokens.consume_a_token(); if (token.is_delim('/')) { if (found_forward_slash) return nullptr; @@ -7484,7 +7484,7 @@ RefPtr Parser::parse_grid_area_shorthand_value(TokenStream& placement_tokens, bool check_for_delimiter = true) -> void { while (tokens.has_next_token()) { - auto& current_token = tokens.next_token(); + auto& current_token = tokens.consume_a_token(); if (check_for_delimiter && current_token.is_delim('/')) break; placement_tokens.append(current_token); @@ -7585,9 +7585,9 @@ RefPtr Parser::parse_grid_template_areas_value(TokenStream grid_area_columns; - auto const parts = MUST(tokens.next_token().token().string().to_string().split(' ')); + auto const parts = MUST(tokens.consume_a_token().token().string().to_string().split(' ')); for (auto& part : parts) { grid_area_columns.append(part); } @@ -7631,7 +7631,7 @@ Parser::ParseErrorOr> Parser::parse_css_value(Prope bool const property_accepts_custom_ident = 
property_accepts_type(property_id, ValueType::CustomIdent); while (unprocessed_tokens.has_next_token()) { - auto const& token = unprocessed_tokens.next_token(); + auto const& token = unprocessed_tokens.consume_a_token(); if (token.is(Token::Type::Semicolon)) { unprocessed_tokens.reconsume_current_input_token(); @@ -7949,7 +7949,7 @@ Parser::ParseErrorOr> Parser::parse_css_value(Prope } // No property matched, so we're done. - dbgln("No property (from {} properties) matched {}", unassigned_properties.size(), stream.peek_token().to_debug_string()); + dbgln("No property (from {} properties) matched {}", unassigned_properties.size(), stream.next_token().to_debug_string()); for (auto id : unassigned_properties) dbgln(" {}", string_from_property_id(id)); break; @@ -7958,7 +7958,7 @@ Parser::ParseErrorOr> Parser::parse_css_value(Prope for (auto& property : unassigned_properties) assigned_values.ensure(to_underlying(property)).append(property_initial_value(m_context.realm(), property)); - stream.skip_whitespace(); + stream.discard_whitespace(); if (stream.has_next_token()) return ParseError::SyntaxError; @@ -8010,7 +8010,7 @@ Optional Parser::parse_css_value_for_properties(Readon return {}; }; - auto& peek_token = tokens.peek_token(); + auto& peek_token = tokens.next_token(); if (auto property = any_property_accepts_type(property_ids, ValueType::EasingFunction); property.has_value()) { if (auto maybe_easing_function = parse_easing_value(tokens)) @@ -8023,7 +8023,7 @@ Optional Parser::parse_css_value_for_properties(Readon auto keyword = keyword_from_string(peek_token.token().ident()); if (keyword.has_value()) { if (auto property = any_property_accepts_keyword(property_ids, keyword.value()); property.has_value()) { - (void)tokens.next_token(); + tokens.discard_a_token(); return PropertyAndValue { *property, CSSKeywordValue::create(keyword.value()) }; } } @@ -8078,14 +8078,14 @@ Optional Parser::parse_css_value_for_properties(Readon if 
(peek_token.token().number().is_integer() && property_accepting_integer.has_value()) { auto integer = IntegerStyleValue::create(peek_token.token().number().integer_value()); if (property_accepts_integer(*property_accepting_integer, integer->as_integer().integer())) { - (void)tokens.next_token(); // integer + tokens.discard_a_token(); // integer return PropertyAndValue { *property_accepting_integer, integer }; } } if (property_accepting_number.has_value()) { auto number = NumberStyleValue::create(peek_token.token().number().value()); if (property_accepts_number(*property_accepting_number, number->as_number().number())) { - (void)tokens.next_token(); // number + tokens.discard_a_token(); // number return PropertyAndValue { *property_accepting_number, number }; } } @@ -8099,7 +8099,7 @@ Optional Parser::parse_css_value_for_properties(Readon if (peek_token.is(Token::Type::Percentage)) { auto percentage = Percentage(peek_token.token().percentage()); if (auto property = any_property_accepts_type(property_ids, ValueType::Percentage); property.has_value() && property_accepts_percentage(*property, percentage)) { - (void)tokens.next_token(); + tokens.discard_a_token(); return PropertyAndValue { *property, PercentageStyleValue::create(percentage) }; } } @@ -8111,7 +8111,7 @@ Optional Parser::parse_css_value_for_properties(Readon if (peek_token.is(Token::Type::String)) { if (auto property = any_property_accepts_type(property_ids, ValueType::String); property.has_value()) - return PropertyAndValue { *property, StringStyleValue::create(tokens.next_token().token().string()) }; + return PropertyAndValue { *property, StringStyleValue::create(tokens.consume_a_token().token().string()) }; } if (auto property = any_property_accepts_type(property_ids, ValueType::Url); property.has_value()) { @@ -8130,7 +8130,7 @@ Optional Parser::parse_css_value_for_properties(Readon if (property_accepts_dimension) { if (peek_token.is(Token::Type::Number) && 
m_context.is_parsing_svg_presentation_attribute()) { auto transaction = tokens.begin_transaction(); - auto token = tokens.next_token(); + auto token = tokens.consume_a_token(); // https://svgwg.org/svg2-draft/types.html#presentation-attribute-css-value // We need to allow in any place that expects a or . // FIXME: How should these numbers be interpreted? https://github.com/w3c/svgwg/issues/792 @@ -8149,7 +8149,7 @@ Optional Parser::parse_css_value_for_properties(Readon auto transaction = tokens.begin_transaction(); if (auto maybe_dimension = parse_dimension(peek_token); maybe_dimension.has_value()) { - (void)tokens.next_token(); + tokens.discard_a_token(); auto dimension = maybe_dimension.release_value(); if (dimension.is_angle()) { auto angle = dimension.angle(); @@ -8200,7 +8200,7 @@ Optional Parser::parse_css_value_for_properties(Readon // we parse it once, and then see if its resolved type matches what the property accepts. if (peek_token.is_function() && (property_accepts_dimension || property_accepts_numeric)) { if (auto maybe_calculated = parse_calculated_value(peek_token); maybe_calculated) { - (void)tokens.next_token(); + tokens.discard_a_token(); auto& calculated = *maybe_calculated; // This is a bit sensitive to ordering: `` and `` have to be checked before ``. // FIXME: When parsing SVG presentation attributes, is permitted wherever , , or are. 
@@ -8721,7 +8721,7 @@ bool Parser::expand_variables(DOM::Element& element, Optional block_values; @@ -8748,11 +8748,11 @@ bool Parser::expand_variables(DOM::Element& element, Optional& source, Vector& dest) { while (source.has_next_token()) { - auto const& value = source.next_token(); + auto const& value = source.consume_a_token(); if (value.is_function()) { if (value.function().name().equals_ignoring_ascii_case("attr"sv)) { if (!substitute_attr_function(element, property_name, value.function(), dest)) @@ -8869,30 +8869,30 @@ bool Parser::substitute_attr_function(DOM::Element& element, FlyString const& pr // attr() = attr( ? , ?) // = string | url | ident | color | number | percentage | length | angle | time | frequency | flex | TokenStream attr_contents { attr_function.values() }; - attr_contents.skip_whitespace(); + attr_contents.discard_whitespace(); if (!attr_contents.has_next_token()) return false; // - Attribute name // FIXME: Support optional attribute namespace - if (!attr_contents.peek_token().is(Token::Type::Ident)) + if (!attr_contents.next_token().is(Token::Type::Ident)) return false; - auto attribute_name = attr_contents.next_token().token().ident(); - attr_contents.skip_whitespace(); + auto attribute_name = attr_contents.consume_a_token().token().ident(); + attr_contents.discard_whitespace(); // - Attribute type (optional) auto attribute_type = "string"_fly_string; - if (attr_contents.peek_token().is(Token::Type::Ident)) { - attribute_type = attr_contents.next_token().token().ident(); - attr_contents.skip_whitespace(); + if (attr_contents.next_token().is(Token::Type::Ident)) { + attribute_type = attr_contents.consume_a_token().token().ident(); + attr_contents.discard_whitespace(); } // - Comma, then fallback values (optional) bool has_fallback_values = false; if (attr_contents.has_next_token()) { - if (!attr_contents.peek_token().is(Token::Type::Comma)) + if (!attr_contents.next_token().is(Token::Type::Comma)) return false; - 
(void)attr_contents.next_token(); // Comma + (void)attr_contents.consume_a_token(); // Comma has_fallback_values = true; } diff --git a/Userland/Libraries/LibWeb/CSS/Parser/SelectorParsing.cpp b/Userland/Libraries/LibWeb/CSS/Parser/SelectorParsing.cpp index 17b2a0c7971..8293a9c2f13 100644 --- a/Userland/Libraries/LibWeb/CSS/Parser/SelectorParsing.cpp +++ b/Userland/Libraries/LibWeb/CSS/Parser/SelectorParsing.cpp @@ -115,11 +115,11 @@ Parser::ParseErrorOr> Parser::parse_complex_selector(Tok Parser::ParseErrorOr> Parser::parse_compound_selector(TokenStream& tokens) { - tokens.skip_whitespace(); + tokens.discard_whitespace(); auto combinator = parse_selector_combinator(tokens).value_or(Selector::Combinator::Descendant); - tokens.skip_whitespace(); + tokens.discard_whitespace(); Vector simple_selectors; @@ -138,7 +138,7 @@ Parser::ParseErrorOr> Parser::parse_compoun Optional Parser::parse_selector_combinator(TokenStream& tokens) { - auto const& current_value = tokens.next_token(); + auto const& current_value = tokens.consume_a_token(); if (current_value.is(Token::Type::Delim)) { switch (current_value.token().delim()) { case '>': @@ -148,12 +148,12 @@ Optional Parser::parse_selector_combinator(TokenStream Parser::parse_selector_qualifi auto transaction = tokens.begin_transaction(); - auto first_token = tokens.next_token(); + auto first_token = tokens.consume_a_token(); if (first_token.is_delim('|')) { // Case 1: `|` - if (is_name(tokens.peek_token())) { - auto name_token = tokens.next_token(); + if (is_name(tokens.next_token())) { + auto name_token = tokens.consume_a_token(); if (allow_wildcard_name == AllowWildcardName::No && name_token.is_delim('*')) return {}; @@ -205,11 +205,11 @@ Optional Parser::parse_selector_qualifi if (!is_name(first_token)) return {}; - if (tokens.peek_token().is_delim('|') && is_name(tokens.peek_token(1))) { + if (tokens.next_token().is_delim('|') && is_name(tokens.peek_token(1))) { // Case 2: `|` - (void)tokens.next_token(); // `|` + 
tokens.discard_a_token(); // `|` auto namespace_ = get_name(first_token); - auto name = get_name(tokens.next_token()); + auto name = get_name(tokens.consume_a_token()); if (allow_wildcard_name == AllowWildcardName::No && name == "*"sv) return {}; @@ -242,7 +242,7 @@ Parser::ParseErrorOr Parser::parse_attribute_simple_se { auto attribute_tokens = TokenStream { first_value.block().values() }; - attribute_tokens.skip_whitespace(); + attribute_tokens.discard_whitespace(); if (!attribute_tokens.has_next_token()) { dbgln_if(CSS_PARSER_DEBUG, "CSS attribute selector is empty!"); @@ -251,7 +251,7 @@ Parser::ParseErrorOr Parser::parse_attribute_simple_se auto maybe_qualified_name = parse_selector_qualified_name(attribute_tokens, AllowWildcardName::No); if (!maybe_qualified_name.has_value()) { - dbgln_if(CSS_PARSER_DEBUG, "Expected qualified-name for attribute name, got: '{}'", attribute_tokens.peek_token().to_debug_string()); + dbgln_if(CSS_PARSER_DEBUG, "Expected qualified-name for attribute name, got: '{}'", attribute_tokens.next_token().to_debug_string()); return ParseError::SyntaxError; } auto qualified_name = maybe_qualified_name.release_value(); @@ -265,11 +265,11 @@ Parser::ParseErrorOr Parser::parse_attribute_simple_se } }; - attribute_tokens.skip_whitespace(); + attribute_tokens.discard_whitespace(); if (!attribute_tokens.has_next_token()) return simple_selector; - auto const& delim_part = attribute_tokens.next_token(); + auto const& delim_part = attribute_tokens.consume_a_token(); if (!delim_part.is(Token::Type::Delim)) { dbgln_if(CSS_PARSER_DEBUG, "Expected a delim for attribute comparison, got: '{}'", delim_part.to_debug_string()); return ParseError::SyntaxError; @@ -283,7 +283,7 @@ Parser::ParseErrorOr Parser::parse_attribute_simple_se return ParseError::SyntaxError; } - auto const& delim_second_part = attribute_tokens.next_token(); + auto const& delim_second_part = attribute_tokens.consume_a_token(); if (!delim_second_part.is_delim('=')) { 
dbgln_if(CSS_PARSER_DEBUG, "Expected a double delim for attribute comparison, got: '{}{}'", delim_part.to_debug_string(), delim_second_part.to_debug_string()); return ParseError::SyntaxError; @@ -309,13 +309,13 @@ Parser::ParseErrorOr Parser::parse_attribute_simple_se } } - attribute_tokens.skip_whitespace(); + attribute_tokens.discard_whitespace(); if (!attribute_tokens.has_next_token()) { dbgln_if(CSS_PARSER_DEBUG, "Attribute selector ended without a value to match."); return ParseError::SyntaxError; } - auto const& value_part = attribute_tokens.next_token(); + auto const& value_part = attribute_tokens.consume_a_token(); if (!value_part.is(Token::Type::Ident) && !value_part.is(Token::Type::String)) { dbgln_if(CSS_PARSER_DEBUG, "Expected a string or ident for the value to match attribute against, got: '{}'", value_part.to_debug_string()); return ParseError::SyntaxError; @@ -323,10 +323,10 @@ Parser::ParseErrorOr Parser::parse_attribute_simple_se auto const& value_string = value_part.token().is(Token::Type::Ident) ? value_part.token().ident() : value_part.token().string(); simple_selector.attribute().value = value_string.to_string(); - attribute_tokens.skip_whitespace(); + attribute_tokens.discard_whitespace(); // Handle case-sensitivity suffixes. 
https://www.w3.org/TR/selectors-4/#attribute-case if (attribute_tokens.has_next_token()) { - auto const& case_sensitivity_part = attribute_tokens.next_token(); + auto const& case_sensitivity_part = attribute_tokens.consume_a_token(); if (case_sensitivity_part.is(Token::Type::Ident)) { auto case_sensitivity = case_sensitivity_part.token().ident(); if (case_sensitivity.equals_ignoring_ascii_case("i"sv)) { @@ -354,7 +354,7 @@ Parser::ParseErrorOr Parser::parse_attribute_simple_se Parser::ParseErrorOr Parser::parse_pseudo_simple_selector(TokenStream& tokens) { auto peek_token_ends_selector = [&]() -> bool { - auto const& value = tokens.peek_token(); + auto const& value = tokens.next_token(); return (value.is(Token::Type::EndOfFile) || value.is(Token::Type::Whitespace) || value.is(Token::Type::Comma)); }; @@ -362,15 +362,15 @@ Parser::ParseErrorOr Parser::parse_pseudo_simple_selec return ParseError::SyntaxError; bool is_pseudo = false; - if (tokens.peek_token().is(Token::Type::Colon)) { + if (tokens.next_token().is(Token::Type::Colon)) { is_pseudo = true; - tokens.next_token(); + tokens.discard_a_token(); if (peek_token_ends_selector()) return ParseError::SyntaxError; } if (is_pseudo) { - auto const& name_token = tokens.next_token(); + auto const& name_token = tokens.consume_a_token(); if (!name_token.is(Token::Type::Ident)) { dbgln_if(CSS_PARSER_DEBUG, "Expected an ident for pseudo-element, got: '{}'", name_token.to_debug_string()); return ParseError::SyntaxError; @@ -409,7 +409,7 @@ Parser::ParseErrorOr Parser::parse_pseudo_simple_selec if (peek_token_ends_selector()) return ParseError::SyntaxError; - auto const& pseudo_class_token = tokens.next_token(); + auto const& pseudo_class_token = tokens.consume_a_token(); if (pseudo_class_token.is(Token::Type::Ident)) { auto pseudo_name = pseudo_class_token.token().ident(); @@ -461,7 +461,7 @@ Parser::ParseErrorOr Parser::parse_pseudo_simple_selec return ParseError::SyntaxError; } - tokens.skip_whitespace(); + 
tokens.discard_whitespace(); if (!tokens.has_next_token()) { return Selector::SimpleSelector { .type = Selector::SimpleSelector::Type::PseudoClass, @@ -475,14 +475,14 @@ Parser::ParseErrorOr Parser::parse_pseudo_simple_selec return ParseError::SyntaxError; // Parse the `of ` syntax - auto const& maybe_of = tokens.next_token(); + auto const& maybe_of = tokens.consume_a_token(); if (!maybe_of.is_ident("of"sv)) return ParseError::SyntaxError; - tokens.skip_whitespace(); + tokens.discard_whitespace(); auto selector_list = TRY(parse_a_selector_list(tokens, SelectorType::Standalone)); - tokens.skip_whitespace(); + tokens.discard_whitespace(); if (tokens.has_next_token()) return ParseError::SyntaxError; @@ -558,9 +558,9 @@ Parser::ParseErrorOr Parser::parse_pseudo_simple_selec } case PseudoClassMetadata::ParameterType::Ident: { auto function_token_stream = TokenStream(pseudo_function.values()); - function_token_stream.skip_whitespace(); - auto maybe_keyword_token = function_token_stream.next_token(); - function_token_stream.skip_whitespace(); + function_token_stream.discard_whitespace(); + auto maybe_keyword_token = function_token_stream.consume_a_token(); + function_token_stream.discard_whitespace(); if (!maybe_keyword_token.is(Token::Type::Ident) || function_token_stream.has_next_token()) { dbgln_if(CSS_PARSER_DEBUG, "Failed to parse :{}() parameter as a keyword: not an ident", pseudo_function.name()); return ParseError::SyntaxError; @@ -586,8 +586,8 @@ Parser::ParseErrorOr Parser::parse_pseudo_simple_selec for (auto language_token_list : language_token_lists) { auto language_token_stream = TokenStream(language_token_list); - language_token_stream.skip_whitespace(); - auto language_token = language_token_stream.next_token(); + language_token_stream.discard_whitespace(); + auto language_token = language_token_stream.consume_a_token(); if (!(language_token.is(Token::Type::Ident) || language_token.is(Token::Type::String))) { dbgln_if(CSS_PARSER_DEBUG, "Invalid language 
range in :{}() - not a string/ident", pseudo_function.name()); return ParseError::SyntaxError; @@ -596,7 +596,7 @@ Parser::ParseErrorOr Parser::parse_pseudo_simple_selec auto language_string = language_token.is(Token::Type::String) ? language_token.token().string() : language_token.token().ident(); languages.append(language_string); - language_token_stream.skip_whitespace(); + language_token_stream.discard_whitespace(); if (language_token_stream.has_next_token()) { dbgln_if(CSS_PARSER_DEBUG, "Invalid language range in :{}() - trailing tokens", pseudo_function.name()); return ParseError::SyntaxError; @@ -633,7 +633,7 @@ Parser::ParseErrorOr Parser::parse_pseudo_simple_selec Parser::ParseErrorOr> Parser::parse_simple_selector(TokenStream& tokens) { auto peek_token_ends_selector = [&]() -> bool { - auto const& value = tokens.peek_token(); + auto const& value = tokens.next_token(); return (value.is(Token::Type::EndOfFile) || value.is(Token::Type::Whitespace) || value.is(Token::Type::Comma)); }; @@ -654,7 +654,7 @@ Parser::ParseErrorOr> Parser::parse_simple_se }; } - auto const& first_value = tokens.next_token(); + auto const& first_value = tokens.consume_a_token(); if (first_value.is(Token::Type::Delim)) { u32 delim = first_value.token().delim(); @@ -666,7 +666,7 @@ Parser::ParseErrorOr> Parser::parse_simple_se if (peek_token_ends_selector()) return ParseError::SyntaxError; - auto const& class_name_value = tokens.next_token(); + auto const& class_name_value = tokens.consume_a_token(); if (!class_name_value.is(Token::Type::Ident)) { dbgln_if(CSS_PARSER_DEBUG, "Expected an ident after '.', got: {}", class_name_value.to_debug_string()); return ParseError::SyntaxError; @@ -796,8 +796,8 @@ Optional Parser::parse_a_n_plus_b_patt // https://www.w3.org/TR/css-syntax-3/#the-anb-type // Unfortunately these can't be in the same order as in the spec. 
- values.skip_whitespace(); - auto const& first_value = values.next_token(); + values.discard_whitespace(); + auto const& first_value = values.consume_a_token(); // odd | even if (first_value.is(Token::Type::Ident)) { @@ -822,11 +822,11 @@ Optional Parser::parse_a_n_plus_b_patt // ['+' | '-'] if (is_n_dimension(first_value)) { int a = first_value.token().dimension_value_int(); - values.skip_whitespace(); + values.discard_whitespace(); // - if (is_signed_integer(values.peek_token())) { - int b = values.next_token().token().to_integer(); + if (is_signed_integer(values.next_token())) { + int b = values.consume_a_token().token().to_integer(); transaction.commit(); return Selector::SimpleSelector::ANPlusBPattern { a, b }; } @@ -834,9 +834,9 @@ Optional Parser::parse_a_n_plus_b_patt // ['+' | '-'] { auto child_transaction = transaction.create_child(); - auto const& second_value = values.next_token(); - values.skip_whitespace(); - auto const& third_value = values.next_token(); + auto const& second_value = values.consume_a_token(); + values.discard_whitespace(); + auto const& third_value = values.consume_a_token(); if (is_sign(second_value) && is_signless_integer(third_value)) { int b = third_value.token().to_integer() * (second_value.is_delim('+') ? 
1 : -1); @@ -851,8 +851,8 @@ Optional Parser::parse_a_n_plus_b_patt } // if (is_ndash_dimension(first_value)) { - values.skip_whitespace(); - auto const& second_value = values.next_token(); + values.discard_whitespace(); + auto const& second_value = values.consume_a_token(); if (is_signless_integer(second_value)) { int a = first_value.token().dimension_value_int(); int b = -second_value.token().to_integer(); @@ -888,11 +888,11 @@ Optional Parser::parse_a_n_plus_b_patt // -n // -n ['+' | '-'] if (first_value.is_ident("-n"sv)) { - values.skip_whitespace(); + values.discard_whitespace(); // -n - if (is_signed_integer(values.peek_token())) { - int b = values.next_token().token().to_integer(); + if (is_signed_integer(values.next_token())) { + int b = values.consume_a_token().token().to_integer(); transaction.commit(); return Selector::SimpleSelector::ANPlusBPattern { -1, b }; } @@ -900,9 +900,9 @@ Optional Parser::parse_a_n_plus_b_patt // -n ['+' | '-'] { auto child_transaction = transaction.create_child(); - auto const& second_value = values.next_token(); - values.skip_whitespace(); - auto const& third_value = values.next_token(); + auto const& second_value = values.consume_a_token(); + values.discard_whitespace(); + auto const& third_value = values.consume_a_token(); if (is_sign(second_value) && is_signless_integer(third_value)) { int b = third_value.token().to_integer() * (second_value.is_delim('+') ? 1 : -1); @@ -917,8 +917,8 @@ Optional Parser::parse_a_n_plus_b_patt } // -n- if (first_value.is_ident("-n-"sv)) { - values.skip_whitespace(); - auto const& second_value = values.next_token(); + values.discard_whitespace(); + auto const& second_value = values.consume_a_token(); if (is_signless_integer(second_value)) { int b = -second_value.token().to_integer(); transaction.commit(); @@ -941,16 +941,16 @@ Optional Parser::parse_a_n_plus_b_patt // We do *not* skip whitespace here. 
} - auto const& first_after_plus = values.next_token(); + auto const& first_after_plus = values.consume_a_token(); // '+'?† n // '+'?† n // '+'?† n ['+' | '-'] if (first_after_plus.is_ident("n"sv)) { - values.skip_whitespace(); + values.discard_whitespace(); // '+'?† n - if (is_signed_integer(values.peek_token())) { - int b = values.next_token().token().to_integer(); + if (is_signed_integer(values.next_token())) { + int b = values.consume_a_token().token().to_integer(); transaction.commit(); return Selector::SimpleSelector::ANPlusBPattern { 1, b }; } @@ -958,9 +958,9 @@ Optional Parser::parse_a_n_plus_b_patt // '+'?† n ['+' | '-'] { auto child_transaction = transaction.create_child(); - auto const& second_value = values.next_token(); - values.skip_whitespace(); - auto const& third_value = values.next_token(); + auto const& second_value = values.consume_a_token(); + values.discard_whitespace(); + auto const& third_value = values.consume_a_token(); if (is_sign(second_value) && is_signless_integer(third_value)) { int b = third_value.token().to_integer() * (second_value.is_delim('+') ? 1 : -1); @@ -976,8 +976,8 @@ Optional Parser::parse_a_n_plus_b_patt // '+'?† n- if (first_after_plus.is_ident("n-"sv)) { - values.skip_whitespace(); - auto const& second_value = values.next_token(); + values.discard_whitespace(); + auto const& second_value = values.consume_a_token(); if (is_signless_integer(second_value)) { int b = -second_value.token().to_integer(); transaction.commit(); diff --git a/Userland/Libraries/LibWeb/CSS/Parser/TokenStream.h b/Userland/Libraries/LibWeb/CSS/Parser/TokenStream.h index 95f8318ae79..c3277799206 100644 --- a/Userland/Libraries/LibWeb/CSS/Parser/TokenStream.h +++ b/Userland/Libraries/LibWeb/CSS/Parser/TokenStream.h @@ -1,6 +1,6 @@ /* * Copyright (c) 2020-2021, the SerenityOS developers. 
- * Copyright (c) 2021-2023, Sam Atkins + * Copyright (c) 2021-2024, Sam Atkins * * SPDX-License-Identifier: BSD-2-Clause */ @@ -14,6 +14,7 @@ namespace Web::CSS::Parser { +// https://drafts.csswg.org/css-syntax/#css-token-stream template<typename T> class TokenStream { public: @@ -21,14 +22,14 @@ public: public: explicit StateTransaction(TokenStream& token_stream) : m_token_stream(token_stream) - , m_saved_iterator_offset(token_stream.m_iterator_offset) + , m_saved_index(token_stream.m_index) { } ~StateTransaction() { if (!m_commit) - m_token_stream.m_iterator_offset = m_saved_iterator_offset; + m_token_stream.m_index = m_saved_index; } StateTransaction create_child() { return StateTransaction(*this); } @@ -44,13 +45,13 @@ public: explicit StateTransaction(StateTransaction& parent) : m_parent(&parent) , m_token_stream(parent.m_token_stream) - , m_saved_iterator_offset(parent.m_token_stream.m_iterator_offset) + , m_saved_index(parent.m_token_stream.m_index) { } StateTransaction* m_parent { nullptr }; TokenStream& m_token_stream; - int m_saved_iterator_offset { 0 }; + size_t m_saved_index { 0 }; bool m_commit { false }; }; @@ -74,60 +75,114 @@ public: TokenStream(TokenStream const&) = delete; TokenStream(TokenStream&&) = default; + // https://drafts.csswg.org/css-syntax/#token-stream-next-token + [[nodiscard]] T const& next_token() const + { + // The item of tokens at index. + // If that index would be out-of-bounds past the end of the list, it’s instead an <eof-token>. + if (m_index < m_tokens.size()) + return m_tokens[m_index]; + return m_eof; + } + + // https://drafts.csswg.org/css-syntax/#token-stream-empty + [[nodiscard]] bool is_empty() const + { + // A token stream is empty if the next token is an <eof-token>. + return next_token().is(Token::Type::EndOfFile); + } + + // https://drafts.csswg.org/css-syntax/#token-stream-consume-a-token + [[nodiscard]] T const& consume_a_token() + { + // Let token be the next token. Increment index, then return token. 
+ auto& token = next_token(); + ++m_index; + return token; + } + + // https://drafts.csswg.org/css-syntax/#token-stream-discard-a-token + void discard_a_token() + { + // If the token stream is not empty, increment index. + if (!is_empty()) + ++m_index; + } + + // https://drafts.csswg.org/css-syntax/#token-stream-mark + void mark() + { + // Append index to marked indexes. + m_marked_indexes.append(m_index); + } + + // https://drafts.csswg.org/css-syntax/#token-stream-restore-a-mark + void restore_a_mark() + { + // Pop from marked indexes, and set index to the popped value. + m_index = m_marked_indexes.take_last(); + } + + // https://drafts.csswg.org/css-syntax/#token-stream-discard-a-mark + void discard_a_mark() + { + // Pop from marked indexes, and do nothing with the popped value. + m_marked_indexes.take_last(); + } + + // https://drafts.csswg.org/css-syntax/#token-stream-discard-whitespace + void discard_whitespace() + { + // While the next token is a <whitespace-token>, discard a token. + while (next_token().is(Token::Type::Whitespace)) + discard_a_token(); + } + bool has_next_token() { - return (size_t)(m_iterator_offset + 1) < m_tokens.size(); - } - - T const& next_token() - { - if (!has_next_token()) - return m_eof; - - ++m_iterator_offset; - - return m_tokens.at(m_iterator_offset); - } - - T const& peek_token(int offset = 0) - { - if (!has_next_token()) - return m_eof; - - return m_tokens.at(m_iterator_offset + offset + 1); + return !is_empty(); } + // Deprecated, used in older versions of the spec. T const& current_token() { - if ((size_t)m_iterator_offset >= m_tokens.size()) + if (m_index < 1 || (m_index - 1) >= m_tokens.size()) return m_eof; - return m_tokens.at(m_iterator_offset); + return m_tokens.at(m_index - 1); } + // Deprecated + T const& peek_token(size_t offset = 0) + { + if (remaining_token_count() <= offset) + return m_eof; + + return m_tokens.at(m_index + offset); + } + + // Deprecated, was used in older versions of the spec. 
void reconsume_current_input_token() { - if (m_iterator_offset >= 0) - --m_iterator_offset; + if (m_index > 0) + --m_index; } StateTransaction begin_transaction() { return StateTransaction(*this); } - void skip_whitespace() + size_t remaining_token_count() const { - while (peek_token().is(Token::Type::Whitespace)) - next_token(); + if (m_tokens.size() > m_index) + return m_tokens.size() - m_index; + return 0; } - size_t token_count() const { return m_tokens.size(); } - size_t remaining_token_count() const { return token_count() - m_iterator_offset - 1; } - void dump_all_tokens() { dbgln("Dumping all tokens:"); for (size_t i = 0; i < m_tokens.size(); ++i) { auto& token = m_tokens[i]; - if ((i - 1) == (size_t)m_iterator_offset) + if (i == m_index) dbgln("-> {}", token.to_debug_string()); else dbgln(" {}", token.to_debug_string()); @@ -136,12 +191,18 @@ public: void copy_state(Badge<Parser>, TokenStream const& other) { - m_iterator_offset = other.m_iterator_offset; + m_index = other.m_index; } private: + // https://drafts.csswg.org/css-syntax/#token-stream-tokens Span<T const> m_tokens; - int m_iterator_offset { -1 }; + + // https://drafts.csswg.org/css-syntax/#token-stream-index + size_t m_index { 0 }; + + // https://drafts.csswg.org/css-syntax/#token-stream-marked-indexes + Vector<size_t> m_marked_indexes; T make_eof() {