ladybird/Userland/Libraries/LibSQL/AST/SyntaxHighlighter.cpp

/*
 * Copyright (c) 2021, Dylan Katz <dykatz@uw.edu>
 *
 * SPDX-License-Identifier: BSD-2-Clause
 */

#include <AK/Debug.h>
#include <LibGfx/Palette.h>
#include <LibSQL/AST/Lexer.h>
#include <LibSQL/AST/SyntaxHighlighter.h>

namespace SQL::AST {
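
// Picks the colour and bold state for a token based on its category, using the
// palette's syntax colours; invalid or unknown categories fall back to the
// plain base text colour.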
static Syntax::TextStyle style_for_token_type(Gfx::Palette const& palette, TokenType type)
{
    switch (Token::category(type)) {
    case TokenCategory::Keyword:
        return { palette.syntax_keyword(), true };
    case TokenCategory::Identifier:
        return { palette.syntax_identifier(), false };
    case TokenCategory::Number:
        return { palette.syntax_number(), false };
    case TokenCategory::Blob:
    case TokenCategory::String:
        return { palette.syntax_string(), false };
    case TokenCategory::Operator:
        return { palette.syntax_operator(), false };
    case TokenCategory::Punctuation:
        return { palette.syntax_punctuation(), false };
    case TokenCategory::Invalid:
    default:
        return { palette.base_text(), false };
    }
}
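
// The base highlighter interface passes token types around as u64; cast back
// to TokenType to check whether the token is an identifier.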
bool SyntaxHighlighter::is_identifier(u64 token) const
{
    auto sql_token = static_cast<TokenType>(static_cast<size_t>(token));
    return sql_token == TokenType::Identifier;
}
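
// Re-lexes the entire document and builds one TextDocumentSpan per token.
// Token line/column numbers are 1-based, so they are shifted down by one for
// the 0-based GUI::TextPosition coordinates; a span's end position is found by
// walking the token's text and tracking newlines.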
void SyntaxHighlighter::rehighlight(Palette const& palette)
{
    auto text = m_client->get_text();
    Lexer lexer(text);

    Vector<GUI::TextDocumentSpan> spans;
    auto append_token = [&](StringView str, Token const& token) {
        if (str.is_empty())
            return;

        GUI::TextPosition position { token.line_number() - 1, token.line_column() - 1 };
        for (char c : str) {
            if (c == '\n') {
                position.set_line(position.line() + 1);
                position.set_column(0);
            } else
                position.set_column(position.column() + 1);
        }

        GUI::TextDocumentSpan span;
        span.range.set_start({ token.line_number() - 1, token.line_column() - 1 });
        span.range.set_end({ position.line(), position.column() });
        auto style = style_for_token_type(palette, token.type());
        span.attributes.color = style.color;
        span.attributes.bold = style.bold;
        span.data = static_cast<u64>(token.type());
        spans.append(span);

        dbgln_if(SYNTAX_HIGHLIGHTING_DEBUG, "{} @ '{}' {}:{} - {}:{}",
            token.name(),
            token.value(),
            span.range.start().line(), span.range.start().column(),
            span.range.end().line(), span.range.end().column());
    };

    for (;;) {
        auto token = lexer.next();
        append_token(token.value(), token);
        if (token.type() == TokenType::Eof)
            break;
    }

    m_client->do_set_spans(move(spans));

    m_has_brace_buddies = false;
    highlight_matching_token_pair();

    m_client->do_update();
}
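
// SQL only matches parentheses as a bracket pair, so the pair list has a
// single entry, built once and cached in a function-local static.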
Vector<SyntaxHighlighter::MatchingTokenPair> SyntaxHighlighter::matching_token_pairs_impl() const
{
    static Vector<SyntaxHighlighter::MatchingTokenPair> pairs;
    if (pairs.is_empty()) {
        pairs.append({ static_cast<u64>(TokenType::ParenOpen), static_cast<u64>(TokenType::ParenClose) });
    }
    return pairs;
}
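
// Spans store their TokenType as a u64 (see rehighlight() above); compare two
// of them by casting back to TokenType.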
bool SyntaxHighlighter::token_types_equal(u64 token1, u64 token2) const
{
    return static_cast<TokenType>(token1) == static_cast<TokenType>(token2);
}

SyntaxHighlighter::~SyntaxHighlighter()
{
}

}