Make RawTokenKind a trivial enum #1334

Merged (1 commit), Feb 12, 2023

@@ -94,7 +94,7 @@ let syntaxClassificationFile = SourceFileSyntax {

try! ExtensionDeclSyntax("extension RawTokenKind") {
try VariableDeclSyntax("internal var classification: SyntaxClassification") {
try SwitchExprSyntax("switch self.base") {
try SwitchExprSyntax("switch self") {
for token in SYNTAX_TOKENS {
SwitchCaseSyntax("case .\(raw: token.swiftKind):") {
if let classification = token.classification {
@@ -30,7 +30,7 @@ let declarationModifierFile = SourceFileSyntax {
}

try InitializerDeclSyntax("init?(lexeme: Lexer.Lexeme)") {
try SwitchExprSyntax("switch lexeme") {
try SwitchExprSyntax("switch PrepareForKeywordMatch(lexeme)") {
for attribute in DECL_MODIFIER_KINDS {
SwitchCaseSyntax("case TokenSpec(.\(raw: attribute.swiftName)):") {
ExprSyntax("self = .\(raw: attribute.swiftName)")
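Throughout this PR, the `init?(lexeme:)` initializers switch over `PrepareForKeywordMatch(lexeme)` instead of the bare lexeme: now that `RawTokenKind` no longer carries a `Keyword` payload, matching a specific keyword has to consult the lexeme's text, and the wrapper presumably exists so that the `TokenSpec` expression patterns can do that comparison. Below is a hypothetical, simplified sketch of text-based keyword matching; every name in it is a stand-in, not the real `TokenSpec`/`PrepareForKeywordMatch` implementation from this PR.

// Hypothetical stand-ins, not the SwiftParser API.
enum SimpleRawKind: UInt8, Equatable { case keyword, identifier, leftParen }

struct SimpleLexeme {
  var rawKind: SimpleRawKind
  var text: String
}

/// Wrapper the switch scrutinizes so that keyword specs can see the lexeme's text.
struct KeywordMatchable {
  var lexeme: SimpleLexeme
  init(_ lexeme: SimpleLexeme) { self.lexeme = lexeme }
}

/// A spec names either a plain raw kind or a specific keyword spelling.
struct SimpleTokenSpec {
  var rawKind: SimpleRawKind
  var keywordText: String?

  init(_ rawKind: SimpleRawKind) {
    self.rawKind = rawKind
    self.keywordText = nil
  }

  init(keyword: String) {
    self.rawKind = .keyword
    self.keywordText = keyword
  }

  static func ~= (spec: SimpleTokenSpec, lexeme: KeywordMatchable) -> Bool {
    if let keywordText = spec.keywordText {
      // Keyword identity lives in the text now, not in the raw kind; matching
      // by text also lets identifiers act as contextual keywords.
      return (lexeme.lexeme.rawKind == .keyword || lexeme.lexeme.rawKind == .identifier)
        && lexeme.lexeme.text == keywordText
    } else {
      return lexeme.lexeme.rawKind == spec.rawKind
    }
  }
}

let sampleLexeme = SimpleLexeme(rawKind: .keyword, text: "private")
switch KeywordMatchable(sampleLexeme) {
case SimpleTokenSpec(keyword: "private"): print("matched the private modifier")
default: print("no match")
}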
@@ -31,7 +31,7 @@ let typeAttributeFile = SourceFileSyntax {
}

try InitializerDeclSyntax("init?(lexeme: Lexer.Lexeme)") {
SwitchExprSyntax(switchKeyword: .keyword(.switch), expression: ExprSyntax("lexeme")) {
try! SwitchExprSyntax("switch PrepareForKeywordMatch(lexeme)") {
for attribute in TYPE_ATTR_KINDS {
SwitchCaseSyntax("case TokenSpec(.\(raw: attribute.name)):") {
ExprSyntax("self = .\(raw: attribute.swiftName)")
@@ -197,101 +197,28 @@ let tokenKindFile = SourceFileSyntax {
// `RawTokenBaseKind` for equality. With the raw value, it compiles down to
// a primitive integer compare, without, it calls into `__derived_enum_equals`.
@frozen // FIXME: Not actually stable, works around a miscompile
public enum RawTokenBaseKind: UInt8, Equatable, Hashable
public enum RawTokenKind: UInt8, Equatable, Hashable
"""
) {
DeclSyntax("case eof")

for token in SYNTAX_TOKENS {
DeclSyntax("case \(raw: token.swiftKind)")
}
}

DeclSyntax(
"""
fileprivate extension Keyword {
static var rawValueZero: Keyword {
return Keyword(rawValue: 0)!
}
}
"""
)

try! StructDeclSyntax(
"""
/// Similar to `TokenKind` but without a `String` associated value.
/// Technically, this should be an enum like
/// ```
/// enum RawTokenKind {
/// case eof
/// case associatedtypeKeyword
/// // remaining case from `RawTokenBaseKind`...
/// case keyword(Keyword)
/// }
/// ```
///
/// But modelling it this way has significant performance implications since
/// comparing two `RawTokenKind` calls into `__derived_enum_equals`. It's more
/// efficient to model the base kind as an enum with a raw value and store the
/// keyword separately.
///
/// Whenever `base` is not `keyword`, `keyword` should have a raw value
/// of `0`.
@frozen // FIXME: Not actually stable, works around a miscompile
public struct RawTokenKind: Equatable, Hashable
"""
) {
DeclSyntax("public let base: RawTokenBaseKind")
DeclSyntax("public let keyword: Keyword")

DeclSyntax(
"""
public init(base: RawTokenBaseKind, keyword: Keyword) {
assert(base == .keyword || keyword.rawValue == 0)
self.base = base
self.keyword = keyword
}
"""
)

DeclSyntax(
"""
public static var eof: RawTokenKind {
return RawTokenKind(base: .eof, keyword: .rawValueZero)
}
"""
)
for token in SYNTAX_TOKENS where token.swiftKind != "keyword" {
try VariableDeclSyntax("public static var \(raw: token.swiftKind): RawTokenKind") {
StmtSyntax("return RawTokenKind(base: .\(raw: token.swiftKind), keyword: .rawValueZero)")
}
}

DeclSyntax(
"""
public static func keyword(_ keyword: Keyword) -> RawTokenKind {
return RawTokenKind(base: .keyword, keyword: keyword)
}
"""
)

try VariableDeclSyntax(
"""
@_spi(RawSyntax)
public var defaultText: SyntaxText?
"""
) {
try! SwitchExprSyntax("switch self.base") {
try! SwitchExprSyntax("switch self") {
SwitchCaseSyntax("case .eof:") {
StmtSyntax(#"return """#)
}

for token in SYNTAX_TOKENS {
if token.swiftKind == "keyword" {
SwitchCaseSyntax("case .\(raw: token.swiftKind):") {
StmtSyntax("return self.keyword.defaultText")
}
} else if let text = token.text {
if let text = token.text {
SwitchCaseSyntax("case .\(raw: token.swiftKind):") {
StmtSyntax("return #\"\(raw: text)\"#")
}
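The comments above describe why the generated code previously split token kinds into a `RawTokenBaseKind` enum plus a `RawTokenKind` struct, and why this PR collapses them into a single trivial enum. A simplified sketch of the before/after shape, using stand-in types rather than the real generated code:

// Before (simplified): a struct pairing a base kind with a keyword payload, so
// comparing two kinds was a synthesized member-wise comparison rather than a
// single integer compare.
struct StructBackedTokenKind: Equatable {
  let base: UInt8     // stand-in for RawTokenBaseKind
  let keyword: UInt8  // stand-in for Keyword; 0 unless base is the keyword case
}

// After (simplified): one trivial enum with a UInt8 raw value. Equality is a
// single integer comparison and there is no payload to keep in sync.
enum EnumBackedTokenKind: UInt8, Equatable {
  case eof
  case keyword    // which keyword it is now travels with the token's text
  case identifier
}

assert(EnumBackedTokenKind.keyword == .keyword)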
@@ -314,7 +241,7 @@
public var isPunctuation: Bool
"""
) {
try! SwitchExprSyntax("switch self.base") {
try! SwitchExprSyntax("switch self") {
SwitchCaseSyntax("case .eof:") {
StmtSyntax("return false")
}
@@ -336,16 +263,22 @@ let tokenKindFile = SourceFileSyntax {
public static func fromRaw(kind rawKind: RawTokenKind, text: String) -> TokenKind
"""
) {
try! SwitchExprSyntax("switch rawKind.base") {
try! SwitchExprSyntax("switch rawKind") {
SwitchCaseSyntax("case .eof:") {
StmtSyntax("return .eof")
}

for token in SYNTAX_TOKENS {
if token.swiftKind == "keyword" {
SwitchCaseSyntax("case .\(raw: token.swiftKind):") {
ExprSyntax("assert(text.isEmpty || String(syntaxText: rawKind.keyword.defaultText) == text)")
StmtSyntax("return .keyword(rawKind.keyword)")
DeclSyntax("var text = text")
StmtSyntax(
"""
return text.withSyntaxText { text in
return .keyword(Keyword(text)!)
}
"""
)
}
} else if token.text != nil {
SwitchCaseSyntax("case .\(raw: token.swiftKind):") {
@@ -377,7 +310,7 @@ let tokenKindFile = SourceFileSyntax {
for token in SYNTAX_TOKENS {
if token.swiftKind == "keyword" {
SwitchCaseSyntax("case .\(raw: token.swiftKind)(let keyword):") {
StmtSyntax("return (.\(raw: token.swiftKind)(keyword), nil)")
StmtSyntax("return (.\(raw: token.swiftKind), String(syntaxText: keyword.defaultText))")
}
} else if token.text != nil {
SwitchCaseSyntax("case .\(raw: token.swiftKind):") {
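With the `Keyword` payload gone from `RawTokenKind`, the generated `fromRaw(kind:text:)` above rebuilds the `Keyword` from the token text, and the decomposition in the last hunk hands back the keyword's default text instead of a payload. A self-contained sketch of that round trip, using hypothetical mini types rather than the SwiftSyntax API:

// Hypothetical stand-ins for Keyword / RawTokenKind / TokenKind.
enum MiniKeyword: String, Equatable { case `struct`, `func`, `private` }

enum MiniRawKind: UInt8, Equatable { case identifier, keyword }

enum MiniTokenKind: Equatable {
  case identifier(String)
  case keyword(MiniKeyword)

  /// Rebuild the rich kind from a trivial raw kind plus the token's text.
  static func fromRaw(kind: MiniRawKind, text: String) -> MiniTokenKind {
    switch kind {
    case .identifier:
      return .identifier(text)
    case .keyword:
      // The keyword identity is recovered from the text.
      return .keyword(MiniKeyword(rawValue: text)!)
    }
  }

  /// Split back into a trivial raw kind plus the text to store on the token.
  func decomposeToRaw() -> (rawKind: MiniRawKind, text: String?) {
    switch self {
    case .identifier(let text):
      return (.identifier, text)
    case .keyword(let keyword):
      return (.keyword, keyword.rawValue)
    }
  }
}

let (rawKind, text) = MiniTokenKind.keyword(.private).decomposeToRaw()
assert(MiniTokenKind.fromRaw(kind: rawKind, text: text!) == .keyword(.private))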
2 changes: 1 addition & 1 deletion Sources/IDEUtils/generated/SyntaxClassification.swift
@@ -135,7 +135,7 @@ extension SyntaxClassification {

extension RawTokenKind {
internal var classification: SyntaxClassification {
switch self.base {
switch self {
case .wildcard:
return .none
case .leftParen:
10 changes: 5 additions & 5 deletions Sources/SwiftParser/Attributes.swift
@@ -65,7 +65,7 @@ extension Parser {
case transpose

init?(lexeme: Lexer.Lexeme) {
switch lexeme {
switch PrepareForKeywordMatch(lexeme) {
case TokenSpec(._alignment): self = ._alignment
case TokenSpec(._backDeploy): self = ._backDeploy
case TokenSpec(._cdecl): self = ._cdecl
@@ -366,7 +366,7 @@ extension Parser {
case forward

init?(lexeme: Lexer.Lexeme) {
switch lexeme {
switch PrepareForKeywordMatch(lexeme) {
case TokenSpec(.reverse): self = .reverse
case TokenSpec(._linear): self = .linear
case TokenSpec(._forward): self = .forward
@@ -477,7 +477,7 @@ extension Parser {
case selfKeyword

init?(lexeme: Lexer.Lexeme) {
switch lexeme {
switch PrepareForKeywordMatch(lexeme) {
case TokenSpec(.identifier): self = .identifier
case TokenSpec(.integerLiteral): self = .integerLiteral
case TokenSpec(.self): self = .selfKeyword
@@ -661,7 +661,7 @@ extension Parser {
case available

init?(lexeme: Lexer.Lexeme) {
switch lexeme {
switch PrepareForKeywordMatch(lexeme) {
case TokenSpec(.target): self = .target
case TokenSpec(.availability): self = .availability
case TokenSpec(.exported): self = .exported
@@ -1102,7 +1102,7 @@ extension Parser {
}

init?(lexeme: Lexer.Lexeme) {
switch lexeme {
switch PrepareForKeywordMatch(lexeme) {
case TokenSpec(.private): self = .private
case TokenSpec(.fileprivate): self = .fileprivate
case TokenSpec(.internal): self = .internal
2 changes: 1 addition & 1 deletion Sources/SwiftParser/Availability.swift
@@ -66,7 +66,7 @@ extension Parser {
case identifier

init?(lexeme: Lexer.Lexeme) {
switch lexeme {
switch PrepareForKeywordMatch(lexeme) {
case TokenSpec(.message): self = .message
case TokenSpec(.renamed): self = .renamed
case TokenSpec(.introduced): self = .introduced
8 changes: 4 additions & 4 deletions Sources/SwiftParser/Declarations.swift
@@ -345,7 +345,7 @@ extension Parser {
}

init?(lexeme: Lexer.Lexeme) {
switch lexeme {
switch PrepareForKeywordMatch(lexeme) {
case TokenSpec(.typealias): self = .typealias
case TokenSpec(.struct): self = .struct
case TokenSpec(.class): self = .class
@@ -556,7 +556,7 @@ extension Parser {
case nativeClassLayout

init?(lexeme: Lexer.Lexeme) {
switch lexeme {
switch PrepareForKeywordMatch(lexeme) {
case TokenSpec(._Trivial): self = .trivialLayout
case TokenSpec(._TrivialAtMost): self = .trivialAtMostLayout
case TokenSpec(._UnknownLayout): self = .unknownLayout
@@ -1494,7 +1494,7 @@ extension Parser {
if hasTryBeforeIntroducer && !value.is(RawTryExprSyntax.self) {
value = RawExprSyntax(
RawTryExprSyntax(
tryKeyword: missingToken(.keyword(.try), text: nil),
tryKeyword: missingToken(.try),
questionOrExclamationMark: nil,
expression: value,
arena: self.arena
@@ -1993,7 +1993,7 @@ extension Parser {
case lowerThan

init?(lexeme: Lexer.Lexeme) {
switch lexeme {
switch PrepareForKeywordMatch(lexeme) {
case TokenSpec(.associativity): self = .associativity
case TokenSpec(.assignment): self = .assignment
case TokenSpec(.higherThan): self = .higherThan
2 changes: 1 addition & 1 deletion Sources/SwiftParser/Directives.swift
@@ -27,7 +27,7 @@ extension Parser {
}

init?(lexeme: Lexer.Lexeme) {
switch lexeme {
switch PrepareForKeywordMatch(lexeme) {
case TokenSpec(.poundIfKeyword): self = .poundIfKeyword
case TokenSpec(.poundElseifKeyword): self = .poundElseifKeyword
case TokenSpec(.poundElseKeyword): self = .poundElseKeyword
4 changes: 2 additions & 2 deletions Sources/SwiftParser/Expressions.swift
@@ -266,7 +266,7 @@ extension Parser {
case throwsKeyword

init?(lexeme: Lexer.Lexeme) {
switch lexeme {
switch PrepareForKeywordMatch(lexeme) {
case TokenSpec(.binaryOperator): self = .binaryOperator
case TokenSpec(.infixQuestionMark): self = .infixQuestionMark
case TokenSpec(.equal): self = .equal
@@ -2256,7 +2256,7 @@ extension Parser {
unknownAttr: nil,
label: .case(
RawSwitchCaseLabelSyntax(
caseKeyword: missingToken(.keyword(.case), text: nil),
caseKeyword: missingToken(.case),
caseItems: RawCaseItemListSyntax(
elements: [
RawCaseItemSyntax(
6 changes: 3 additions & 3 deletions Sources/SwiftParser/Lexer/Cursor.swift
@@ -218,7 +218,7 @@ extension Lexer {
var input: UnsafeBufferPointer<UInt8>
var previous: UInt8
/// If we have already lexed a token, the kind of the previously lexed token
var previousTokenKind: RawTokenBaseKind?
var previousTokenKind: RawTokenKind?
private var stateStack: StateStack = StateStack()

init(input: UnsafeBufferPointer<UInt8>, previous: UInt8) {
@@ -357,7 +357,7 @@ extension Lexer.Cursor {
flags.insert(.isAtStartOfLine)
}

self.previousTokenKind = result.tokenKind.base
self.previousTokenKind = result.tokenKind
diagnostic = TokenDiagnostic(combining: diagnostic, result.error?.tokenDiagnostic(tokenStart: cursor))

return .init(
@@ -1900,7 +1900,7 @@ extension Lexer.Cursor {

let text = tokStart.text(upTo: self)
if let keyword = Keyword(text), keyword.isLexerClassified {
return Lexer.Result(.keyword(keyword))
return Lexer.Result(.keyword)
} else if text == "_" {
return Lexer.Result(.wildcard)
} else {
8 changes: 2 additions & 6 deletions Sources/SwiftParser/Modifiers.swift
@@ -166,7 +166,7 @@ extension Parser {
}

init?(lexeme: Lexer.Lexeme) {
switch lexeme {
switch PrepareForKeywordMatch(lexeme) {
case TokenSpec(.private): self = .private
case TokenSpec(.fileprivate): self = .fileprivate
case TokenSpec(.internal): self = .internal
@@ -200,11 +200,7 @@ extension Parser {
(unexpectedBeforeDetail, detail) = eat(setHandle)
} else {
unexpectedBeforeDetail = nil
detail = RawTokenSyntax(
missing: .keyword(.set),
text: "set",
arena: arena
)
detail = missingToken(.set)
}
let (unexpectedBeforeRightParen, rightParen) = expect(.rightParen)

2 changes: 1 addition & 1 deletion Sources/SwiftParser/Names.swift
@@ -282,7 +282,7 @@ extension Lexer.Lexeme {
// Only lexer-classified lexemes have `RawTokenKind` of `keyword.
// Contextual keywords will only be made keywords when a `RawTokenSyntax` is
// constructed from them.
return self.rawTokenKind.base == .keyword
return self.rawTokenKind == .keyword
}

func starts(with symbol: SyntaxText) -> Bool {
5 changes: 5 additions & 0 deletions Sources/SwiftParser/Parser.swift
@@ -169,6 +169,11 @@ public struct Parser {
return RawTokenSyntax(missing: kind, text: text, arena: self.arena)
}

@_spi(RawSyntax)
public mutating func missingToken(_ keyword: Keyword) -> RawTokenSyntax {
return missingToken(.keyword, text: keyword.defaultText)
}

/// Consumes the current token and advances the lexer to the next token.
///
/// - Returns: The token that was consumed.
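The `missingToken(_ keyword:)` convenience added above is what lets call sites earlier in this diff shrink from `missingToken(.keyword(.try), text: nil)` to `missingToken(.try)`: with keywords no longer encoded in `RawTokenKind`, a missing keyword token is created from the plain `.keyword` kind plus the keyword's default text.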
4 changes: 2 additions & 2 deletions Sources/SwiftParser/Patterns.swift
@@ -55,7 +55,7 @@ extension Parser {
case varKeyword

init?(lexeme: Lexer.Lexeme) {
switch lexeme {
switch PrepareForKeywordMatch(lexeme) {
case TokenSpec(.leftParen): self = .leftParen
case TokenSpec(.wildcard): self = .wildcard
case TokenSpec(.identifier): self = .identifier
@@ -296,7 +296,7 @@ extension Parser.Lookahead {
case leftParen

init?(lexeme: Lexer.Lexeme) {
switch lexeme {
switch PrepareForKeywordMatch(lexeme) {
case TokenSpec(.identifier): self = .identifier
case TokenSpec(.wildcard): self = .wildcard
case TokenSpec(.let): self = .letKeyword