From e60c7ae9c037dcc70fb0b600b1ec0c2d6d70b30e Mon Sep 17 00:00:00 2001 From: Patrick Date: Mon, 15 Jan 2024 17:24:02 +0800 Subject: [PATCH 01/74] add module name support for semantic tokens --- .../src/Ide/Plugin/SemanticTokens/Internal.hs | 32 +++++++++-------- .../src/Ide/Plugin/SemanticTokens/Mappings.hs | 1 + .../src/Ide/Plugin/SemanticTokens/Query.hs | 34 ++++++++++--------- .../src/Ide/Plugin/SemanticTokens/Types.hs | 8 +++-- .../src/Ide/Plugin/SemanticTokens/Utils.hs | 12 +++---- .../hls-semantic-tokens-plugin/test/Main.hs | 4 ++- .../test/testdata/TDatatypeImported.expected | 1 + 7 files changed, 52 insertions(+), 40 deletions(-) diff --git a/plugins/hls-semantic-tokens-plugin/src/Ide/Plugin/SemanticTokens/Internal.hs b/plugins/hls-semantic-tokens-plugin/src/Ide/Plugin/SemanticTokens/Internal.hs index 4c22af78db..1457b8e098 100644 --- a/plugins/hls-semantic-tokens-plugin/src/Ide/Plugin/SemanticTokens/Internal.hs +++ b/plugins/hls-semantic-tokens-plugin/src/Ide/Plugin/SemanticTokens/Internal.hs @@ -21,7 +21,6 @@ import Control.Monad.Except (ExceptT, liftEither, withExceptT) import Control.Monad.Trans (lift) import Control.Monad.Trans.Except (runExceptT) -import Data.Aeson (ToJSON (toJSON)) import qualified Data.Map as Map import Development.IDE (Action, GetDocMap (GetDocMap), @@ -34,7 +33,6 @@ import Development.IDE (Action, cmapWithPrio, define, fromNormalizedFilePath, hieKind, logPriority, - usePropertyAction, use_) import Development.IDE.Core.PluginUtils (runActionE, useWithStaleE) @@ -62,6 +60,9 @@ import Language.LSP.Protocol.Types (NormalizedFilePath, SemanticTokens, type (|?) (InL)) import Prelude hiding (span) +import qualified Data.Set as S +import Data.Map (Map) +import qualified Data.Map as M $mkSemanticConfigFunctions @@ -101,27 +102,28 @@ getSemanticTokensRule recorder = ast <- handleMaybe (LogNoAST $ show nfp) $ getAsts hieAst Map.!? (HiePath . mkFastString . fromNormalizedFilePath) nfp virtualFile <- handleMaybeM LogNoVF $ getVirtualFile nfp -- get current location from the old ones - let spanNamesMap = hieAstSpanNames virtualFile ast - let names = nameSetElemsStable $ unionNameSets $ Map.elems spanNamesMap + let spanIdMap = M.filter (not . 
null) $ hieAstSpanIdentifiers virtualFile ast + let names = S.toList $ S.unions $ Map.elems spanIdMap let localSemanticMap = mkLocalNameSemanticFromAst names (hieKindFunMasksKind hieKind) refMap -- get imported name semantic map - let importedNameSemanticMap = foldr (getTypeExclude localSemanticMap getTyThingMap) emptyNameEnv names - let sMap = plusNameEnv_C (<>) importedNameSemanticMap localSemanticMap - let rangeTokenType = extractSemanticTokensFromNames sMap spanNamesMap + let importedNameSemanticMap = foldr (getTypeExclude localSemanticMap getTyThingMap) mempty names + let sMap = M.unionWith (<>) importedNameSemanticMap localSemanticMap + let rangeTokenType = extractSemanticTokensFromNames sMap spanIdMap return $ RangeHsSemanticTokenTypes rangeTokenType where -- ignore one already in discovered in local getTypeExclude :: - NameEnv a -> + Map Identifier a -> NameEnv TyThing -> - Name -> - NameEnv HsSemanticTokenType -> - NameEnv HsSemanticTokenType + Identifier -> + Map Identifier HsSemanticTokenType -> + Map Identifier HsSemanticTokenType getTypeExclude localEnv tyThingMap n nameMap - | n `elemNameEnv` localEnv = nameMap - | otherwise = - let tyThing = lookupNameEnv tyThingMap n - in maybe nameMap (extendNameEnv nameMap n) (tyThing >>= tyThingSemantic) + | n `M.member` localEnv = nameMap + | (Right name) <- n = + let tyThing = lookupNameEnv tyThingMap name + in maybe nameMap (\k -> M.insert n k nameMap) (tyThing >>= tyThingSemantic) + | otherwise = nameMap -- | Persistent rule to ensure that semantic tokens doesn't block on startup persistentGetSemanticTokensRule :: Rules () diff --git a/plugins/hls-semantic-tokens-plugin/src/Ide/Plugin/SemanticTokens/Mappings.hs b/plugins/hls-semantic-tokens-plugin/src/Ide/Plugin/SemanticTokens/Mappings.hs index 013d77a9e6..4af65554a4 100644 --- a/plugins/hls-semantic-tokens-plugin/src/Ide/Plugin/SemanticTokens/Mappings.hs +++ b/plugins/hls-semantic-tokens-plugin/src/Ide/Plugin/SemanticTokens/Mappings.hs @@ -45,6 +45,7 @@ toLspTokenType conf tk = case tk of TTypeFamily -> stTypeFamily conf TRecordField -> stRecordField conf TPatternSynonym -> stPatternSynonym conf + TModuleName -> stModuleName conf lspTokenReverseMap :: SemanticTokensConfig -> Map.Map SemanticTokenTypes HsSemanticTokenType lspTokenReverseMap config diff --git a/plugins/hls-semantic-tokens-plugin/src/Ide/Plugin/SemanticTokens/Query.hs b/plugins/hls-semantic-tokens-plugin/src/Ide/Plugin/SemanticTokens/Query.hs index 174048049f..e35fe21ee0 100644 --- a/plugins/hls-semantic-tokens-plugin/src/Ide/Plugin/SemanticTokens/Query.hs +++ b/plugins/hls-semantic-tokens-plugin/src/Ide/Plugin/SemanticTokens/Query.hs @@ -21,13 +21,14 @@ import Development.IDE.GHC.Compat import Development.IDE.GHC.Error (realSrcSpanToCodePointRange) import Ide.Plugin.SemanticTokens.Mappings import Ide.Plugin.SemanticTokens.Types (HieFunMaskKind, - HsSemanticTokenType, + HsSemanticTokenType (TModuleName), NameSemanticMap, SemanticTokensConfig) import Language.LSP.Protocol.Types import Language.LSP.VFS (VirtualFile, codePointRangeToRange) import Prelude hiding (span) +import Data.Set (Set) --------------------------------------------------------- @@ -35,17 +36,18 @@ import Prelude hiding (span) --------------------------------------------------------- -mkLocalNameSemanticFromAst :: [Name] -> HieFunMaskKind a -> RefMap a -> NameSemanticMap -mkLocalNameSemanticFromAst names hieKind rm = mkNameEnv (mapMaybe (nameNameSemanticFromHie hieKind rm) names) +mkLocalNameSemanticFromAst :: [Identifier] -> HieFunMaskKind a -> RefMap 
a -> NameSemanticMap +mkLocalNameSemanticFromAst names hieKind rm = M.fromList (mapMaybe (nameNameSemanticFromHie hieKind rm) names) -nameNameSemanticFromHie :: forall a. HieFunMaskKind a -> RefMap a -> Name -> Maybe (Name, HsSemanticTokenType) -nameNameSemanticFromHie hieKind rm ns = do +nameNameSemanticFromHie :: forall a. HieFunMaskKind a -> RefMap a -> Identifier -> Maybe (Identifier, HsSemanticTokenType) +nameNameSemanticFromHie _ _ ns@(Left _) = Just (ns, TModuleName) +nameNameSemanticFromHie hieKind rm ns@(Right _) = do st <- nameSemanticFromRefMap rm ns return (ns, st) where - nameSemanticFromRefMap :: RefMap a -> Name -> Maybe HsSemanticTokenType + nameSemanticFromRefMap :: RefMap a -> Identifier -> Maybe HsSemanticTokenType nameSemanticFromRefMap rm' name' = do - spanInfos <- Map.lookup (Right name') rm' + spanInfos <- Map.lookup name' rm' let typeTokenType = foldMap (typeSemantic hieKind) $ listToMaybe $ mapMaybe (identType . snd) spanInfos contextInfoTokenType <- foldMap (contextInfosMaybeTokenType . identInfo . snd) spanInfos fold [typeTokenType, Just contextInfoTokenType] @@ -62,11 +64,11 @@ nameNameSemanticFromHie hieKind rm ns = do -- | get only visible names from HieAST -- we care only the leaf node of the AST -- and filter out the derived and evidence names -hieAstSpanNames :: VirtualFile -> HieAST a -> M.Map Range NameSet -hieAstSpanNames vf ast = +hieAstSpanIdentifiers :: VirtualFile -> HieAST a -> M.Map Range (Set Identifier) +hieAstSpanIdentifiers vf ast = if null (nodeChildren ast) then getIds ast - else M.unionsWith unionNameSet $ map (hieAstSpanNames vf) (nodeChildren ast) + else M.unionsWith S.union $ map (hieAstSpanIdentifiers vf) (nodeChildren ast) where getIds ast' = fromMaybe mempty $ do range <- codePointRangeToRange vf $ realSrcSpanToCodePointRange $ nodeSpan ast' @@ -76,15 +78,15 @@ hieAstSpanNames vf ast = . Map.filterWithKey (\k _ -> k == SourceInfo) . getSourcedNodeInfo . sourcedNodeInfo - combineNodeIds :: NameSet -> NodeInfo a -> NameSet - ad `combineNodeIds` (NodeInfo _ _ bd) = ad `unionNameSet` xs + combineNodeIds :: Set Identifier -> NodeInfo a -> Set Identifier + ad `combineNodeIds` (NodeInfo _ _ bd) = ad `S.union` xs where - xs = mkNameSet $ rights $ M.keys $ M.filterWithKey inclusion bd + xs = S.fromList $ M.keys $ M.filterWithKey inclusion bd inclusion :: Identifier -> IdentifierDetails a -> Bool inclusion a b = not $ exclusion a b exclusion :: Identifier -> IdentifierDetails a -> Bool exclusion idt IdentifierDetails {identInfo = infos} = case idt of - Left _ -> True + Left _ -> False Right _ -> any isEvidenceContext (S.toList infos) ------------------------------------------------- @@ -93,8 +95,8 @@ hieAstSpanNames vf ast = ------------------------------------------------- -extractSemanticTokensFromNames :: NameSemanticMap -> M.Map Range NameSet -> M.Map Range HsSemanticTokenType -extractSemanticTokensFromNames nsm = Map.mapMaybe (foldMap (lookupNameEnv nsm) . 
nameSetElemsStable) +extractSemanticTokensFromNames :: NameSemanticMap -> M.Map Range (Set Identifier) -> M.Map Range HsSemanticTokenType +extractSemanticTokensFromNames nsm = Map.mapMaybe (foldMap (flip M.lookup nsm)) rangeSemanticMapSemanticTokens :: SemanticTokensConfig -> PositionMapping -> M.Map Range HsSemanticTokenType -> Either Text SemanticTokens rangeSemanticMapSemanticTokens stc mapping = diff --git a/plugins/hls-semantic-tokens-plugin/src/Ide/Plugin/SemanticTokens/Types.hs b/plugins/hls-semantic-tokens-plugin/src/Ide/Plugin/SemanticTokens/Types.hs index 5be028ace8..81c689cb70 100644 --- a/plugins/hls-semantic-tokens-plugin/src/Ide/Plugin/SemanticTokens/Types.hs +++ b/plugins/hls-semantic-tokens-plugin/src/Ide/Plugin/SemanticTokens/Types.hs @@ -26,6 +26,7 @@ import GHC.Generics (Generic) import Language.LSP.Protocol.Types -- import template haskell import Language.Haskell.TH.Syntax (Lift) +import Data.Map (Map) -- !!!! order of declarations matters deriving enum and ord @@ -43,6 +44,7 @@ data HsSemanticTokenType | TTypeSynonym -- Type synonym | TTypeFamily -- type family | TRecordField -- from match bind + | TModuleName -- from match bind deriving (Eq, Ord, Show, Enum, Bounded, Generic, Lift) @@ -65,6 +67,7 @@ instance Default SemanticTokensConfig where , stTypeSynonym = SemanticTokenTypes_Type , stTypeFamily = SemanticTokenTypes_Interface , stRecordField = SemanticTokenTypes_Property + , stModuleName = SemanticTokenTypes_Namespace } -- | SemanticTokensConfig_ is a configuration for the semantic tokens plugin. -- it contains map between the hs semantic token type and default token type. @@ -80,6 +83,7 @@ data SemanticTokensConfig = STC , stTypeSynonym :: !SemanticTokenTypes , stTypeFamily :: !SemanticTokenTypes , stRecordField :: !SemanticTokenTypes + , stModuleName :: !SemanticTokenTypes } deriving (Generic, Show) @@ -108,7 +112,7 @@ data Loc = Loc instance Show Loc where show (Loc line startChar len) = show line <> ":" <> show startChar <> "-" <> show (startChar + len) -type NameSemanticMap = NameEnv HsSemanticTokenType +type NameSemanticMap = Map Identifier HsSemanticTokenType data GetSemanticTokens = GetSemanticTokens deriving (Eq, Show, Typeable, Generic) @@ -124,7 +128,7 @@ instance NFData RangeHsSemanticTokenTypes where rnf (RangeHsSemanticTokenTypes a) = rwhnf a instance Show RangeHsSemanticTokenTypes where - show = const "GlobalNameMap" + show = const "RangeHsSemanticTokenTypes" type instance RuleResult GetSemanticTokens = RangeHsSemanticTokenTypes diff --git a/plugins/hls-semantic-tokens-plugin/src/Ide/Plugin/SemanticTokens/Utils.hs b/plugins/hls-semantic-tokens-plugin/src/Ide/Plugin/SemanticTokens/Utils.hs index fb29c14729..42223c891f 100644 --- a/plugins/hls-semantic-tokens-plugin/src/Ide/Plugin/SemanticTokens/Utils.hs +++ b/plugins/hls-semantic-tokens-plugin/src/Ide/Plugin/SemanticTokens/Utils.hs @@ -83,12 +83,12 @@ nameTypesString xs = unlines | (span, name) <- xs] -nameMapString :: NameSemanticMap -> [Name] -> String -nameMapString nsm names = unlines - [ showSDocUnsafe (ppr name) ++ " " ++ show tokenType - | name <- names - , let tokenType = lookupNameEnv nsm name - ] +-- nameMapString :: NameSemanticMap -> [Name] -> String +-- nameMapString nsm names = unlines +-- [ showSDocUnsafe (ppr name) ++ " " ++ show tokenType +-- | name <- names +-- , let tokenType = lookupNameEnv nsm name +-- ] showSpan :: RealSrcSpan -> String diff --git a/plugins/hls-semantic-tokens-plugin/test/Main.hs b/plugins/hls-semantic-tokens-plugin/test/Main.hs index ef8482081a..4e9bc68eb8 
100644 --- a/plugins/hls-semantic-tokens-plugin/test/Main.hs +++ b/plugins/hls-semantic-tokens-plugin/test/Main.hs @@ -167,7 +167,9 @@ semanticTokensTests = let file1 = "TModuleA.hs" let file2 = "TModuleB.hs" let expect = - [ SemanticTokenOriginal TVariable (Loc 5 1 2) "go", + [ + SemanticTokenOriginal TModuleName (Loc 3 8 8) "TModuleA", + SemanticTokenOriginal TVariable (Loc 5 1 2) "go", SemanticTokenOriginal TDataConstructor (Loc 5 6 4) "Game" ] Test.Hls.runSessionWithServerInTmpDir def semanticTokensPlugin (mkFs $ FS.directProjectMulti [file1, file2]) $ do diff --git a/plugins/hls-semantic-tokens-plugin/test/testdata/TDatatypeImported.expected b/plugins/hls-semantic-tokens-plugin/test/testdata/TDatatypeImported.expected index 9c2118cd3a..a59308400d 100644 --- a/plugins/hls-semantic-tokens-plugin/test/testdata/TDatatypeImported.expected +++ b/plugins/hls-semantic-tokens-plugin/test/testdata/TDatatypeImported.expected @@ -1,3 +1,4 @@ +3:8-17 TModuleName "System.IO" 5:1-3 TVariable "go" 5:7-9 TTypeConstructor "IO" 6:1-3 TVariable "go" From 13793827e9f925f382d99fdb253e0a9783dd9729 Mon Sep 17 00:00:00 2001 From: Patrick Date: Mon, 15 Jan 2024 17:29:21 +0800 Subject: [PATCH 02/74] cleanup --- .../src/Ide/Plugin/SemanticTokens/Mappings.hs | 4 ++-- .../src/Ide/Plugin/SemanticTokens/Types.hs | 2 +- .../src/Ide/Plugin/SemanticTokens/Utils.hs | 9 --------- 3 files changed, 3 insertions(+), 12 deletions(-) diff --git a/plugins/hls-semantic-tokens-plugin/src/Ide/Plugin/SemanticTokens/Mappings.hs b/plugins/hls-semantic-tokens-plugin/src/Ide/Plugin/SemanticTokens/Mappings.hs index 4af65554a4..447a1e025a 100644 --- a/plugins/hls-semantic-tokens-plugin/src/Ide/Plugin/SemanticTokens/Mappings.hs +++ b/plugins/hls-semantic-tokens-plugin/src/Ide/Plugin/SemanticTokens/Mappings.hs @@ -115,9 +115,9 @@ recoverFunMaskArray flattened = unflattened -- The recursion in 'unflattened' is crucial - it's what gives us sharing -- function indicator check. unflattened :: A.Array TypeIndex Bool - unflattened = fmap (\flatTy -> go (fmap (unflattened A.!) flatTy)) flattened + unflattened = fmap (go . 
fmap (unflattened A.!)) flattened - -- Unfold an 'HieType' whose subterms have already been unfolded + -- Unfold an 'HieType' whose sub-terms have already been unfolded go :: HieType Bool -> Bool go (HTyVarTy _name) = False go (HAppTy _f _x) = False diff --git a/plugins/hls-semantic-tokens-plugin/src/Ide/Plugin/SemanticTokens/Types.hs b/plugins/hls-semantic-tokens-plugin/src/Ide/Plugin/SemanticTokens/Types.hs index 81c689cb70..985fb2ee0e 100644 --- a/plugins/hls-semantic-tokens-plugin/src/Ide/Plugin/SemanticTokens/Types.hs +++ b/plugins/hls-semantic-tokens-plugin/src/Ide/Plugin/SemanticTokens/Types.hs @@ -121,7 +121,7 @@ instance Hashable GetSemanticTokens instance NFData GetSemanticTokens -data RangeHsSemanticTokenTypes = RangeHsSemanticTokenTypes {rangeSemanticMap :: M.Map Range HsSemanticTokenType} +newtype RangeHsSemanticTokenTypes = RangeHsSemanticTokenTypes {rangeSemanticMap :: M.Map Range HsSemanticTokenType} instance NFData RangeHsSemanticTokenTypes where rnf :: RangeHsSemanticTokenTypes -> () diff --git a/plugins/hls-semantic-tokens-plugin/src/Ide/Plugin/SemanticTokens/Utils.hs b/plugins/hls-semantic-tokens-plugin/src/Ide/Plugin/SemanticTokens/Utils.hs index 42223c891f..d25e752b04 100644 --- a/plugins/hls-semantic-tokens-plugin/src/Ide/Plugin/SemanticTokens/Utils.hs +++ b/plugins/hls-semantic-tokens-plugin/src/Ide/Plugin/SemanticTokens/Utils.hs @@ -10,7 +10,6 @@ import Data.ByteString.Char8 (unpack) import qualified Data.Map as Map import Development.IDE (Position (..), Range (..)) import Development.IDE.GHC.Compat -import Ide.Plugin.SemanticTokens.Types import Prelude hiding (span) deriving instance Show DeclType @@ -83,14 +82,6 @@ nameTypesString xs = unlines | (span, name) <- xs] --- nameMapString :: NameSemanticMap -> [Name] -> String --- nameMapString nsm names = unlines --- [ showSDocUnsafe (ppr name) ++ " " ++ show tokenType --- | name <- names --- , let tokenType = lookupNameEnv nsm name --- ] - - showSpan :: RealSrcSpan -> String showSpan x = show (srcSpanStartLine x) <> ":" <> show (srcSpanStartCol x) <> "-" <> show (srcSpanEndCol x) From b2cbb8c14474948a355aa35860d7cfe0e9ea470f Mon Sep 17 00:00:00 2001 From: Patrick Date: Mon, 15 Jan 2024 17:32:17 +0800 Subject: [PATCH 03/74] cleanup --- .../src/Ide/Plugin/SemanticTokens/Mappings.hs | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/plugins/hls-semantic-tokens-plugin/src/Ide/Plugin/SemanticTokens/Mappings.hs b/plugins/hls-semantic-tokens-plugin/src/Ide/Plugin/SemanticTokens/Mappings.hs index 447a1e025a..0c044ac61a 100644 --- a/plugins/hls-semantic-tokens-plugin/src/Ide/Plugin/SemanticTokens/Mappings.hs +++ b/plugins/hls-semantic-tokens-plugin/src/Ide/Plugin/SemanticTokens/Mappings.hs @@ -123,7 +123,7 @@ recoverFunMaskArray flattened = unflattened go (HAppTy _f _x) = False go (HLitTy _lit) = False go (HForAllTy ((_n, _k), _af) b) = b - go (HFunTy _ _ _) = True + go (HFunTy {}) = True go (HQualTy _constraint b) = b go (HCastTy b) = b go HCoercionTy = False From 7896b67521ddd20d185adfe79679874988755887 Mon Sep 17 00:00:00 2001 From: Patrick Date: Mon, 15 Jan 2024 17:47:03 +0800 Subject: [PATCH 04/74] fix docName --- .../hls-semantic-tokens-plugin.cabal | 4 ++-- .../src/Ide/Plugin/SemanticTokens/SemanticConfig.hs | 1 + 2 files changed, 3 insertions(+), 2 deletions(-) diff --git a/plugins/hls-semantic-tokens-plugin/hls-semantic-tokens-plugin.cabal b/plugins/hls-semantic-tokens-plugin/hls-semantic-tokens-plugin.cabal index d3cd5ee6fc..f05f6dfb59 100644 --- 
a/plugins/hls-semantic-tokens-plugin/hls-semantic-tokens-plugin.cabal +++ b/plugins/hls-semantic-tokens-plugin/hls-semantic-tokens-plugin.cabal @@ -20,7 +20,7 @@ source-repository head location: https://github.com/haskell/haskell-language-server.git library - ghc-options: -Wall + ghc-options: -Wall -Wincomplete-patterns -Werror=incomplete-patterns buildable: True exposed-modules: Ide.Plugin.SemanticTokens @@ -61,7 +61,7 @@ library test-suite tests type: exitcode-stdio-1.0 - ghc-options: -Wall + ghc-options: -Wall -Wincomplete-patterns -Werror=incomplete-patterns default-language: Haskell2010 hs-source-dirs: test main-is: Main.hs diff --git a/plugins/hls-semantic-tokens-plugin/src/Ide/Plugin/SemanticTokens/SemanticConfig.hs b/plugins/hls-semantic-tokens-plugin/src/Ide/Plugin/SemanticTokens/SemanticConfig.hs index 7afcc879da..21574bf2c1 100644 --- a/plugins/hls-semantic-tokens-plugin/src/Ide/Plugin/SemanticTokens/SemanticConfig.hs +++ b/plugins/hls-semantic-tokens-plugin/src/Ide/Plugin/SemanticTokens/SemanticConfig.hs @@ -34,6 +34,7 @@ docName tt = case tt of TTypeSynonym -> "type synonyms" TTypeFamily -> "type families" TRecordField -> "record fields" + TModuleName -> "module names" toConfigName :: String -> String toConfigName = ("st" <>) From 422fc95183a435464af10fed9bd1336bbea9a658 Mon Sep 17 00:00:00 2001 From: Patrick Date: Mon, 15 Jan 2024 18:44:34 +0800 Subject: [PATCH 05/74] regenerate the config test result --- .../schema/ghc92/default-config.golden.json | 1 + .../ghc92/vscode-extension-schema.golden.json | 56 +++++++++++++++++++ .../schema/ghc94/default-config.golden.json | 1 + .../ghc94/vscode-extension-schema.golden.json | 56 +++++++++++++++++++ .../schema/ghc96/default-config.golden.json | 1 + .../ghc96/vscode-extension-schema.golden.json | 56 +++++++++++++++++++ .../schema/ghc98/default-config.golden.json | 1 + .../ghc98/vscode-extension-schema.golden.json | 56 +++++++++++++++++++ 8 files changed, 228 insertions(+) diff --git a/test/testdata/schema/ghc92/default-config.golden.json b/test/testdata/schema/ghc92/default-config.golden.json index e55282483d..a5cbc48064 100644 --- a/test/testdata/schema/ghc92/default-config.golden.json +++ b/test/testdata/schema/ghc92/default-config.golden.json @@ -122,6 +122,7 @@ "classToken": "class", "dataConstructorToken": "enumMember", "functionToken": "function", + "moduleNameToken": "namespace", "patternSynonymToken": "macro", "recordFieldToken": "property", "typeConstructorToken": "enum", diff --git a/test/testdata/schema/ghc92/vscode-extension-schema.golden.json b/test/testdata/schema/ghc92/vscode-extension-schema.golden.json index 844079ff9b..f8d84cbea9 100644 --- a/test/testdata/schema/ghc92/vscode-extension-schema.golden.json +++ b/test/testdata/schema/ghc92/vscode-extension-schema.golden.json @@ -485,6 +485,62 @@ "scope": "resource", "type": "string" }, + "haskell.plugin.semanticTokens.config.moduleNameToken": { + "default": "namespace", + "description": "LSP semantic token type to use for module names", + "enum": [ + "namespace", + "type", + "class", + "enum", + "interface", + "struct", + "typeParameter", + "parameter", + "variable", + "property", + "enumMember", + "event", + "function", + "method", + "macro", + "keyword", + "modifier", + "comment", + "string", + "number", + "regexp", + "operator", + "decorator" + ], + "enumDescriptions": [ + "LSP Semantic Token Type: namespace", + "LSP Semantic Token Type: type", + "LSP Semantic Token Type: class", + "LSP Semantic Token Type: enum", + "LSP Semantic Token Type: interface", + "LSP 
Semantic Token Type: struct", + "LSP Semantic Token Type: typeParameter", + "LSP Semantic Token Type: parameter", + "LSP Semantic Token Type: variable", + "LSP Semantic Token Type: property", + "LSP Semantic Token Type: enumMember", + "LSP Semantic Token Type: event", + "LSP Semantic Token Type: function", + "LSP Semantic Token Type: method", + "LSP Semantic Token Type: macro", + "LSP Semantic Token Type: keyword", + "LSP Semantic Token Type: modifier", + "LSP Semantic Token Type: comment", + "LSP Semantic Token Type: string", + "LSP Semantic Token Type: number", + "LSP Semantic Token Type: regexp", + "LSP Semantic Token Type: operator", + "LSP Semantic Token Type: decorator" + ], + "scope": "resource", + "type": "string" + }, "haskell.plugin.semanticTokens.config.patternSynonymToken": { "default": "macro", "description": "LSP semantic token type to use for pattern synonyms", diff --git a/test/testdata/schema/ghc94/default-config.golden.json b/test/testdata/schema/ghc94/default-config.golden.json index e792c5fe8b..15c28e5c6c 100644 --- a/test/testdata/schema/ghc94/default-config.golden.json +++ b/test/testdata/schema/ghc94/default-config.golden.json @@ -122,6 +122,7 @@ "classToken": "class", "dataConstructorToken": "enumMember", "functionToken": "function", + "moduleNameToken": "namespace", "patternSynonymToken": "macro", "recordFieldToken": "property", "typeConstructorToken": "enum", diff --git a/test/testdata/schema/ghc94/vscode-extension-schema.golden.json b/test/testdata/schema/ghc94/vscode-extension-schema.golden.json index fe3b42bfdf..6ee77630b8 100644 --- a/test/testdata/schema/ghc94/vscode-extension-schema.golden.json +++ b/test/testdata/schema/ghc94/vscode-extension-schema.golden.json @@ -485,6 +485,62 @@ "scope": "resource", "type": "string" }, + "haskell.plugin.semanticTokens.config.moduleNameToken": { + "default": "namespace", + "description": "LSP semantic token type to use for module names", + "enum": [ + "namespace", + "type", + "class", + "enum", + "interface", + "struct", + "typeParameter", + "parameter", + "variable", + "property", + "enumMember", + "event", + "function", + "method", + "macro", + "keyword", + "modifier", + "comment", + "string", + "number", + "regexp", + "operator", + "decorator" + ], + "enumDescriptions": [ + "LSP Semantic Token Type: namespace", + "LSP Semantic Token Type: type", + "LSP Semantic Token Type: class", + "LSP Semantic Token Type: enum", + "LSP Semantic Token Type: interface", + "LSP Semantic Token Type: struct", + "LSP Semantic Token Type: typeParameter", + "LSP Semantic Token Type: parameter", + "LSP Semantic Token Type: variable", + "LSP Semantic Token Type: property", + "LSP Semantic Token Type: enumMember", + "LSP Semantic Token Type: event", + "LSP Semantic Token Type: function", + "LSP Semantic Token Type: method", + "LSP Semantic Token Type: macro", + "LSP Semantic Token Type: keyword", + "LSP Semantic Token Type: modifier", + "LSP Semantic Token Type: comment", + "LSP Semantic Token Type: string", + "LSP Semantic Token Type: number", + "LSP Semantic Token Type: regexp", + "LSP Semantic Token Type: operator", + "LSP Semantic Token Type: decorator" + ], + "scope": "resource", + "type": "string" + }, "haskell.plugin.semanticTokens.config.patternSynonymToken": { "default": "macro", "description": "LSP semantic token type to use for pattern synonyms", diff --git a/test/testdata/schema/ghc96/default-config.golden.json b/test/testdata/schema/ghc96/default-config.golden.json index e792c5fe8b..15c28e5c6c 100644 --- 
a/test/testdata/schema/ghc96/default-config.golden.json +++ b/test/testdata/schema/ghc96/default-config.golden.json @@ -122,6 +122,7 @@ "classToken": "class", "dataConstructorToken": "enumMember", "functionToken": "function", + "moduleNameToken": "namespace", "patternSynonymToken": "macro", "recordFieldToken": "property", "typeConstructorToken": "enum", diff --git a/test/testdata/schema/ghc96/vscode-extension-schema.golden.json b/test/testdata/schema/ghc96/vscode-extension-schema.golden.json index fe3b42bfdf..6ee77630b8 100644 --- a/test/testdata/schema/ghc96/vscode-extension-schema.golden.json +++ b/test/testdata/schema/ghc96/vscode-extension-schema.golden.json @@ -485,6 +485,62 @@ "scope": "resource", "type": "string" }, + "haskell.plugin.semanticTokens.config.moduleNameToken": { + "default": "namespace", + "description": "LSP semantic token type to use for module names", + "enum": [ + "namespace", + "type", + "class", + "enum", + "interface", + "struct", + "typeParameter", + "parameter", + "variable", + "property", + "enumMember", + "event", + "function", + "method", + "macro", + "keyword", + "modifier", + "comment", + "string", + "number", + "regexp", + "operator", + "decorator" + ], + "enumDescriptions": [ + "LSP Semantic Token Type: namespace", + "LSP Semantic Token Type: type", + "LSP Semantic Token Type: class", + "LSP Semantic Token Type: enum", + "LSP Semantic Token Type: interface", + "LSP Semantic Token Type: struct", + "LSP Semantic Token Type: typeParameter", + "LSP Semantic Token Type: parameter", + "LSP Semantic Token Type: variable", + "LSP Semantic Token Type: property", + "LSP Semantic Token Type: enumMember", + "LSP Semantic Token Type: event", + "LSP Semantic Token Type: function", + "LSP Semantic Token Type: method", + "LSP Semantic Token Type: macro", + "LSP Semantic Token Type: keyword", + "LSP Semantic Token Type: modifier", + "LSP Semantic Token Type: comment", + "LSP Semantic Token Type: string", + "LSP Semantic Token Type: number", + "LSP Semantic Token Type: regexp", + "LSP Semantic Token Type: operator", + "LSP Semantic Token Type: decorator" + ], + "scope": "resource", + "type": "string" + }, "haskell.plugin.semanticTokens.config.patternSynonymToken": { "default": "macro", "description": "LSP semantic token type to use for pattern synonyms", diff --git a/test/testdata/schema/ghc98/default-config.golden.json b/test/testdata/schema/ghc98/default-config.golden.json index b42d8f4e51..fe1725f4fb 100644 --- a/test/testdata/schema/ghc98/default-config.golden.json +++ b/test/testdata/schema/ghc98/default-config.golden.json @@ -115,6 +115,7 @@ "classToken": "class", "dataConstructorToken": "enumMember", "functionToken": "function", + "moduleNameToken": "namespace", "patternSynonymToken": "macro", "recordFieldToken": "property", "typeConstructorToken": "enum", diff --git a/test/testdata/schema/ghc98/vscode-extension-schema.golden.json b/test/testdata/schema/ghc98/vscode-extension-schema.golden.json index 861b8a37e0..8703df1a2b 100644 --- a/test/testdata/schema/ghc98/vscode-extension-schema.golden.json +++ b/test/testdata/schema/ghc98/vscode-extension-schema.golden.json @@ -467,6 +467,62 @@ "scope": "resource", "type": "string" }, + "haskell.plugin.semanticTokens.config.moduleNameToken": { + "default": "namespace", + "description": "LSP semantic token type to use for module names", + "enum": [ + "namespace", + "type", + "class", + "enum", + "interface", + "struct", + "typeParameter", + "parameter", + "variable", + "property", + "enumMember", + "event", + "function", + 
"method", + "macro", + "keyword", + "modifier", + "comment", + "string", + "number", + "regexp", + "operator", + "decorator" + ], + "enumDescriptions": [ + "LSP Semantic Token Type: namespace", + "LSP Semantic Token Type: type", + "LSP Semantic Token Type: class", + "LSP Semantic Token Type: enum", + "LSP Semantic Token Type: interface", + "LSP Semantic Token Type: struct", + "LSP Semantic Token Type: typeParameter", + "LSP Semantic Token Type: parameter", + "LSP Semantic Token Type: variable", + "LSP Semantic Token Type: property", + "LSP Semantic Token Type: enumMember", + "LSP Semantic Token Type: event", + "LSP Semantic Token Type: function", + "LSP Semantic Token Type: method", + "LSP Semantic Token Type: macro", + "LSP Semantic Token Type: keyword", + "LSP Semantic Token Type: modifier", + "LSP Semantic Token Type: comment", + "LSP Semantic Token Type: string", + "LSP Semantic Token Type: number", + "LSP Semantic Token Type: regexp", + "LSP Semantic Token Type: operator", + "LSP Semantic Token Type: decorator" + ], + "scope": "resource", + "type": "string" + }, "haskell.plugin.semanticTokens.config.patternSynonymToken": { "default": "macro", "description": "LSP semantic token type to use for pattern synonyms", From b2e0f311912eabbd7494c51c5b5b26bea9454f81 Mon Sep 17 00:00:00 2001 From: Patrick Date: Tue, 16 Jan 2024 04:43:14 +0800 Subject: [PATCH 06/74] stylish --- .../src/Ide/Plugin/SemanticTokens/Internal.hs | 6 +++--- .../src/Ide/Plugin/SemanticTokens/Mappings.hs | 4 ++-- .../src/Ide/Plugin/SemanticTokens/Query.hs | 2 +- .../src/Ide/Plugin/SemanticTokens/Types.hs | 2 +- .../src/Ide/Plugin/SemanticTokens/Utils.hs | 10 +++++----- 5 files changed, 12 insertions(+), 12 deletions(-) diff --git a/plugins/hls-semantic-tokens-plugin/src/Ide/Plugin/SemanticTokens/Internal.hs b/plugins/hls-semantic-tokens-plugin/src/Ide/Plugin/SemanticTokens/Internal.hs index 1457b8e098..846e162503 100644 --- a/plugins/hls-semantic-tokens-plugin/src/Ide/Plugin/SemanticTokens/Internal.hs +++ b/plugins/hls-semantic-tokens-plugin/src/Ide/Plugin/SemanticTokens/Internal.hs @@ -21,7 +21,10 @@ import Control.Monad.Except (ExceptT, liftEither, withExceptT) import Control.Monad.Trans (lift) import Control.Monad.Trans.Except (runExceptT) +import Data.Map (Map) +import qualified Data.Map as M import qualified Data.Map as Map +import qualified Data.Set as S import Development.IDE (Action, GetDocMap (GetDocMap), GetHieAst (GetHieAst), @@ -60,9 +63,6 @@ import Language.LSP.Protocol.Types (NormalizedFilePath, SemanticTokens, type (|?) 
(InL)) import Prelude hiding (span) -import qualified Data.Set as S -import Data.Map (Map) -import qualified Data.Map as M $mkSemanticConfigFunctions diff --git a/plugins/hls-semantic-tokens-plugin/src/Ide/Plugin/SemanticTokens/Mappings.hs b/plugins/hls-semantic-tokens-plugin/src/Ide/Plugin/SemanticTokens/Mappings.hs index 0c044ac61a..5fb0f46161 100644 --- a/plugins/hls-semantic-tokens-plugin/src/Ide/Plugin/SemanticTokens/Mappings.hs +++ b/plugins/hls-semantic-tokens-plugin/src/Ide/Plugin/SemanticTokens/Mappings.hs @@ -45,7 +45,7 @@ toLspTokenType conf tk = case tk of TTypeFamily -> stTypeFamily conf TRecordField -> stRecordField conf TPatternSynonym -> stPatternSynonym conf - TModuleName -> stModuleName conf + TModuleName -> stModuleName conf lspTokenReverseMap :: SemanticTokensConfig -> Map.Map SemanticTokenTypes HsSemanticTokenType lspTokenReverseMap config @@ -123,7 +123,7 @@ recoverFunMaskArray flattened = unflattened go (HAppTy _f _x) = False go (HLitTy _lit) = False go (HForAllTy ((_n, _k), _af) b) = b - go (HFunTy {}) = True + go (HFunTy {}) = True go (HQualTy _constraint b) = b go (HCastTy b) = b go HCoercionTy = False diff --git a/plugins/hls-semantic-tokens-plugin/src/Ide/Plugin/SemanticTokens/Query.hs b/plugins/hls-semantic-tokens-plugin/src/Ide/Plugin/SemanticTokens/Query.hs index e35fe21ee0..b28e8dd60a 100644 --- a/plugins/hls-semantic-tokens-plugin/src/Ide/Plugin/SemanticTokens/Query.hs +++ b/plugins/hls-semantic-tokens-plugin/src/Ide/Plugin/SemanticTokens/Query.hs @@ -12,6 +12,7 @@ import qualified Data.Map as M import qualified Data.Map as Map import Data.Maybe (fromMaybe, listToMaybe, mapMaybe) +import Data.Set (Set) import qualified Data.Set as S import qualified Data.Set as Set import Data.Text (Text) @@ -28,7 +29,6 @@ import Language.LSP.Protocol.Types import Language.LSP.VFS (VirtualFile, codePointRangeToRange) import Prelude hiding (span) -import Data.Set (Set) --------------------------------------------------------- diff --git a/plugins/hls-semantic-tokens-plugin/src/Ide/Plugin/SemanticTokens/Types.hs b/plugins/hls-semantic-tokens-plugin/src/Ide/Plugin/SemanticTokens/Types.hs index 985fb2ee0e..8bfbc23156 100644 --- a/plugins/hls-semantic-tokens-plugin/src/Ide/Plugin/SemanticTokens/Types.hs +++ b/plugins/hls-semantic-tokens-plugin/src/Ide/Plugin/SemanticTokens/Types.hs @@ -25,8 +25,8 @@ import Development.IDE.Graph.Classes (Hashable) import GHC.Generics (Generic) import Language.LSP.Protocol.Types -- import template haskell +import Data.Map (Map) import Language.Haskell.TH.Syntax (Lift) -import Data.Map (Map) -- !!!! 
order of declarations matters deriving enum and ord diff --git a/plugins/hls-semantic-tokens-plugin/src/Ide/Plugin/SemanticTokens/Utils.hs b/plugins/hls-semantic-tokens-plugin/src/Ide/Plugin/SemanticTokens/Utils.hs index d25e752b04..f5b0b167c0 100644 --- a/plugins/hls-semantic-tokens-plugin/src/Ide/Plugin/SemanticTokens/Utils.hs +++ b/plugins/hls-semantic-tokens-plugin/src/Ide/Plugin/SemanticTokens/Utils.hs @@ -5,12 +5,12 @@ module Ide.Plugin.SemanticTokens.Utils where -import Data.ByteString (ByteString) -import Data.ByteString.Char8 (unpack) -import qualified Data.Map as Map -import Development.IDE (Position (..), Range (..)) +import Data.ByteString (ByteString) +import Data.ByteString.Char8 (unpack) +import qualified Data.Map as Map +import Development.IDE (Position (..), Range (..)) import Development.IDE.GHC.Compat -import Prelude hiding (span) +import Prelude hiding (span) deriving instance Show DeclType deriving instance Show BindType From 388ff4be0ce1ce8afd94b3d59e173ad179a707fd Mon Sep 17 00:00:00 2001 From: Patrick Date: Wed, 17 Jan 2024 17:45:23 +0800 Subject: [PATCH 07/74] mend split modulename for qualified name token --- ghcide/src/Development/IDE/GHC/Compat.hs | 1 + .../hls-semantic-tokens-plugin.cabal | 1 + .../src/Ide/Plugin/SemanticTokens/Internal.hs | 6 +- .../src/Ide/Plugin/SemanticTokens/Query.hs | 79 ++++++++++++------- .../src/Ide/Plugin/SemanticTokens/Utils.hs | 60 +++++++++++++- .../test/testdata/T1.expected | 6 +- 6 files changed, 120 insertions(+), 33 deletions(-) diff --git a/ghcide/src/Development/IDE/GHC/Compat.hs b/ghcide/src/Development/IDE/GHC/Compat.hs index 12c3fb346e..8cdc22cdf4 100644 --- a/ghcide/src/Development/IDE/GHC/Compat.hs +++ b/ghcide/src/Development/IDE/GHC/Compat.hs @@ -38,6 +38,7 @@ module Development.IDE.GHC.Compat( FastStringCompat, bytesFS, mkFastStringByteString, + lengthFS, nodeInfo', getNodeIds, sourceNodeInfo, diff --git a/plugins/hls-semantic-tokens-plugin/hls-semantic-tokens-plugin.cabal b/plugins/hls-semantic-tokens-plugin/hls-semantic-tokens-plugin.cabal index f05f6dfb59..ee52848479 100644 --- a/plugins/hls-semantic-tokens-plugin/hls-semantic-tokens-plugin.cabal +++ b/plugins/hls-semantic-tokens-plugin/hls-semantic-tokens-plugin.cabal @@ -37,6 +37,7 @@ library , aeson , base >=4.12 && <5 , containers + , text-rope , extra , hiedb , mtl >= 2.2 diff --git a/plugins/hls-semantic-tokens-plugin/src/Ide/Plugin/SemanticTokens/Internal.hs b/plugins/hls-semantic-tokens-plugin/src/Ide/Plugin/SemanticTokens/Internal.hs index 846e162503..48d8ac63fa 100644 --- a/plugins/hls-semantic-tokens-plugin/src/Ide/Plugin/SemanticTokens/Internal.hs +++ b/plugins/hls-semantic-tokens-plugin/src/Ide/Plugin/SemanticTokens/Internal.hs @@ -1,16 +1,19 @@ {-# LANGUAGE DataKinds #-} {-# LANGUAGE DerivingStrategies #-} -{-# LANGUAGE FlexibleContexts #-} +{-# LANGUAGE FlexibleContexts #-} +{-# LANGUAGE PolyKinds #-} {-# LANGUAGE FlexibleInstances #-} {-# LANGUAGE MultiParamTypeClasses #-} {-# LANGUAGE NamedFieldPuns #-} {-# LANGUAGE OverloadedLabels #-} {-# LANGUAGE OverloadedStrings #-} +{-# LANGUAGE OverloadedRecordDot #-} {-# LANGUAGE RecordWildCards #-} {-# LANGUAGE ScopedTypeVariables #-} {-# LANGUAGE TemplateHaskell #-} {-# LANGUAGE TypeFamilies #-} {-# LANGUAGE UnicodeSyntax #-} +{-# OPTIONS_GHC -Wno-missing-signatures #-} -- | -- This module provides the core functionality of the plugin. 
@@ -92,6 +95,7 @@ semanticTokensFull recorder state pid param = do -- Local names token type from 'hieAst' -- Name locations from 'hieAst' -- Visible names from 'tmrRenamed' + -- -- It then combines this information to compute the semantic tokens for the file. getSemanticTokensRule :: Recorder (WithPriority SemanticLog) -> Rules () diff --git a/plugins/hls-semantic-tokens-plugin/src/Ide/Plugin/SemanticTokens/Query.hs b/plugins/hls-semantic-tokens-plugin/src/Ide/Plugin/SemanticTokens/Query.hs index b28e8dd60a..e79657af97 100644 --- a/plugins/hls-semantic-tokens-plugin/src/Ide/Plugin/SemanticTokens/Query.hs +++ b/plugins/hls-semantic-tokens-plugin/src/Ide/Plugin/SemanticTokens/Query.hs @@ -1,34 +1,49 @@ -{-# LANGUAGE ExplicitNamespaces #-} -{-# LANGUAGE RankNTypes #-} +{-# LANGUAGE ExplicitNamespaces #-} +{-# LANGUAGE OverloadedRecordDot #-} +{-# LANGUAGE OverloadedStrings #-} +{-# LANGUAGE RankNTypes #-} {-# LANGUAGE ScopedTypeVariables #-} -- | -- The query module is used to query the semantic tokens from the AST module Ide.Plugin.SemanticTokens.Query where -import Data.Either (rights) -import Data.Foldable (fold) -import qualified Data.Map as M -import qualified Data.Map as Map -import Data.Maybe (fromMaybe, listToMaybe, - mapMaybe) -import Data.Set (Set) -import qualified Data.Set as S -import qualified Data.Set as Set -import Data.Text (Text) -import Development.IDE.Core.PositionMapping (PositionMapping, - toCurrentRange) -import Development.IDE.GHC.Compat -import Development.IDE.GHC.Error (realSrcSpanToCodePointRange) -import Ide.Plugin.SemanticTokens.Mappings -import Ide.Plugin.SemanticTokens.Types (HieFunMaskKind, - HsSemanticTokenType (TModuleName), - NameSemanticMap, - SemanticTokensConfig) -import Language.LSP.Protocol.Types -import Language.LSP.VFS (VirtualFile, - codePointRangeToRange) -import Prelude hiding (span) +import Data.Foldable (fold) +import qualified Data.Map as M +import qualified Data.Map as Map +import Data.Maybe + ( fromMaybe, + listToMaybe, + mapMaybe, + ) +import Data.Set (Set) +import qualified Data.Set as S +import qualified Data.Set as Set +import Data.Text (Text) +import Development.IDE.Core.PositionMapping + ( PositionMapping, + toCurrentRange, + ) +import Development.IDE.GHC.Compat +import Development.IDE.GHC.Error (realSrcSpanToCodePointRange) +import Ide.Plugin.SemanticTokens.Mappings +import Ide.Plugin.SemanticTokens.Types + ( HieFunMaskKind, + HsSemanticTokenType (TModuleName), + NameSemanticMap, + SemanticTokensConfig, + ) +import Ide.Plugin.SemanticTokens.Utils (splitModuleNameAndOccName) +import Language.LSP.Protocol.Types + ( Position (Position), + Range (Range), + SemanticTokenAbsolute (SemanticTokenAbsolute), + SemanticTokens, + defaultSemanticTokensLegend, + makeSemanticTokens, + ) +import Language.LSP.VFS +import Prelude hiding (length, span) --------------------------------------------------------- @@ -72,7 +87,10 @@ hieAstSpanIdentifiers vf ast = where getIds ast' = fromMaybe mempty $ do range <- codePointRangeToRange vf $ realSrcSpanToCodePointRange $ nodeSpan ast' - return $ M.singleton range (getNodeIds' ast') + return $ + M.fromListWith + (<>) + [S.singleton <$> ri | idt <- S.toList (getNodeIds' ast'), ri <- splitModuleNameAndOccName vf range idt] getNodeIds' = Map.foldl' combineNodeIds mempty . 
Map.filterWithKey (\k _ -> k == SourceInfo) @@ -86,9 +104,10 @@ hieAstSpanIdentifiers vf ast = inclusion a b = not $ exclusion a b exclusion :: Identifier -> IdentifierDetails a -> Bool exclusion idt IdentifierDetails {identInfo = infos} = case idt of - Left _ -> False + Left _ -> False Right _ -> any isEvidenceContext (S.toList infos) + ------------------------------------------------- -- * extract semantic tokens from NameSemanticMap @@ -96,7 +115,7 @@ hieAstSpanIdentifiers vf ast = ------------------------------------------------- extractSemanticTokensFromNames :: NameSemanticMap -> M.Map Range (Set Identifier) -> M.Map Range HsSemanticTokenType -extractSemanticTokensFromNames nsm = Map.mapMaybe (foldMap (flip M.lookup nsm)) +extractSemanticTokensFromNames nsm = Map.mapMaybe (foldMap (`M.lookup` nsm)) rangeSemanticMapSemanticTokens :: SemanticTokensConfig -> PositionMapping -> M.Map Range HsSemanticTokenType -> Either Text SemanticTokens rangeSemanticMapSemanticTokens stc mapping = @@ -106,7 +125,9 @@ rangeSemanticMapSemanticTokens stc mapping = . M.mapKeys (toCurrentRange mapping) where toAbsSemanticToken :: Range -> HsSemanticTokenType -> SemanticTokenAbsolute - toAbsSemanticToken (Range (Position startLine startColumn) (Position _endLine endColumn)) tokenType = + toAbsSemanticToken (Range (Language.LSP.Protocol.Types.Position startLine startColumn) + (Language.LSP.Protocol.Types.Position _endLine endColumn)) + tokenType = let len = endColumn - startColumn in SemanticTokenAbsolute (fromIntegral startLine) diff --git a/plugins/hls-semantic-tokens-plugin/src/Ide/Plugin/SemanticTokens/Utils.hs b/plugins/hls-semantic-tokens-plugin/src/Ide/Plugin/SemanticTokens/Utils.hs index f5b0b167c0..ccf2fe3206 100644 --- a/plugins/hls-semantic-tokens-plugin/src/Ide/Plugin/SemanticTokens/Utils.hs +++ b/plugins/hls-semantic-tokens-plugin/src/Ide/Plugin/SemanticTokens/Utils.hs @@ -1,5 +1,9 @@ {-# LANGUAGE StandaloneDeriving #-} +{-# LANGUAGE OverloadedStrings #-} +{-# LANGUAGE OverloadedRecordDot #-} {-# LANGUAGE TypeFamilies #-} +{-# LANGUAGE PatternGuards #-} +{-# LANGUAGE ViewPatterns #-} {-# OPTIONS_GHC -Wno-orphans #-} @@ -10,7 +14,14 @@ import Data.ByteString.Char8 (unpack) import qualified Data.Map as Map import Development.IDE (Position (..), Range (..)) import Development.IDE.GHC.Compat -import Prelude hiding (span) +import Prelude hiding (length, span) +import Development.IDE.GHC.Compat.Util (mkFastString) +import Language.LSP.VFS (VirtualFile (VirtualFile), _file_text) +import qualified Data.Text.Utf16.Rope as Rope +import Data.Text.Utf16.Rope (Rope, splitAtPosition) +import Data.Text (breakOnEnd, length, Text) +import Control.Monad (guard) +import qualified Data.List deriving instance Show DeclType deriving instance Show BindType @@ -90,3 +101,50 @@ showSpan x = show (srcSpanStartLine x) <> ":" <> show (srcSpanStartCol x) <> "-" mkRange :: (Integral a1, Integral a2) => a1 -> a2 -> a2 -> Range mkRange startLine startCol len = Range (Position (fromIntegral startLine) (fromIntegral startCol)) (Position (fromIntegral startLine) (fromIntegral $ startCol + len)) + + +-- nameLength is in code points unit. +-- while Range might not in code points unit. 
+-- but we can still use it to get the length +-- since it is only used to exclude some names +-- currently, we only break `(ModuleA.b)` into `(ModuleA` and `.b)` +splitModuleNameAndOccName :: VirtualFile -> Range -> Identifier -> [(Range,Identifier)] +splitModuleNameAndOccName _ ran (Left m) = [(ran, Left m)] +splitModuleNameAndOccName vf ran@(Range (Position startLine startColumn) (Position _endLine endColumn)) (Right name) + | nameLength name < fromIntegral (endColumn - startColumn), (Just prefixLen) <- peekPrefix vf ran = + [(Range (Position startLine startColumn) (Position startLine (startColumn + fromIntegral prefixLen)) + , Left (ModuleName $ mkFastString "")), -- we do not need the module name + (Range (Position startLine (startColumn + fromIntegral prefixLen)) (Position startLine endColumn), Right name)] + | otherwise = [(ran, Right name)] + +nameLength :: Name -> Int +nameLength = lengthFS . occNameFS . nameOccName + +-- | peek at the prefix of a range, +-- if it is a qualified name, return the length of the module name. +-- module name everything before the last dot. +peekPrefix :: VirtualFile -> Range -> Maybe Int +peekPrefix rp ran = do + token <- getTextByCodePointRangeFromVfs rp ran + let prefixLen = length $ fst $ breakOnEnd "." token + guard $ prefixLen > 0 + return prefixLen + +-- | get the text from a range in a virtual file +getTextByCodePointRangeFromVfs :: VirtualFile -> Range -> Maybe Text +getTextByCodePointRangeFromVfs vf ra = do + let rp = vf._file_text + let (pos, len) = rangeToPositionLength ra + (_, suffix) <- splitAtPosition (codePointPositionRopePosition pos) rp + (prefix, _) <- Rope.splitAt len suffix + let token = Rope.toText prefix + return token + where + rangeToPositionLength :: (Integral l) => Range -> (Position, l) + rangeToPositionLength (Range beginPos@(Position _ startColumn) (Position _ endColumn)) = + (beginPos, fromIntegral $ endColumn - startColumn) + codePointPositionRopePosition :: Position -> Rope.Position + codePointPositionRopePosition (Position line column) = do + let line' = fromIntegral line + let column' = fromIntegral column + Rope.Position line' column' diff --git a/plugins/hls-semantic-tokens-plugin/test/testdata/T1.expected b/plugins/hls-semantic-tokens-plugin/test/testdata/T1.expected index 062d4749d3..d3c9224b1a 100644 --- a/plugins/hls-semantic-tokens-plugin/test/testdata/T1.expected +++ b/plugins/hls-semantic-tokens-plugin/test/testdata/T1.expected @@ -22,7 +22,8 @@ 21:7-10 TPatternSynonym "One" 23:6-9 TTypeConstructor "Doo" 23:12-15 TDataConstructor "Doo" -23:16-27 TTypeConstructor "Prelude.Int" +23:16-24 TModuleName "Prelude." +23:24-27 TTypeConstructor "Int" 24:6-10 TTypeSynonym "Bar1" 24:13-16 TTypeConstructor "Int" 25:6-10 TTypeSynonym "Bar2" @@ -72,7 +73,8 @@ 41:1-3 TFunction "go" 41:6-9 TRecordField "foo" 42:1-4 TFunction "add" -42:7-18 TClassMethod "(Prelude.+)" +42:7-16 TModuleName "(Prelude." 
+42:16-18 TClassMethod "+)" 47:1-5 TVariable "main" 47:9-11 TTypeConstructor "IO" 48:1-5 TVariable "main" From 4b785555652ef5654d675e6a99b1c2b82af4cca5 Mon Sep 17 00:00:00 2001 From: Patrick Date: Wed, 17 Jan 2024 18:04:35 +0800 Subject: [PATCH 08/74] rename --- .../src/Ide/Plugin/SemanticTokens/Utils.hs | 9 ++++----- 1 file changed, 4 insertions(+), 5 deletions(-) diff --git a/plugins/hls-semantic-tokens-plugin/src/Ide/Plugin/SemanticTokens/Utils.hs b/plugins/hls-semantic-tokens-plugin/src/Ide/Plugin/SemanticTokens/Utils.hs index ccf2fe3206..301f588f63 100644 --- a/plugins/hls-semantic-tokens-plugin/src/Ide/Plugin/SemanticTokens/Utils.hs +++ b/plugins/hls-semantic-tokens-plugin/src/Ide/Plugin/SemanticTokens/Utils.hs @@ -16,12 +16,11 @@ import Development.IDE (Position (..), Range (..)) import Development.IDE.GHC.Compat import Prelude hiding (length, span) import Development.IDE.GHC.Compat.Util (mkFastString) -import Language.LSP.VFS (VirtualFile (VirtualFile), _file_text) +import Language.LSP.VFS (_file_text, VirtualFile) import qualified Data.Text.Utf16.Rope as Rope import Data.Text.Utf16.Rope (Rope, splitAtPosition) import Data.Text (breakOnEnd, length, Text) import Control.Monad (guard) -import qualified Data.List deriving instance Show DeclType deriving instance Show BindType @@ -111,7 +110,7 @@ mkRange startLine startCol len = splitModuleNameAndOccName :: VirtualFile -> Range -> Identifier -> [(Range,Identifier)] splitModuleNameAndOccName _ ran (Left m) = [(ran, Left m)] splitModuleNameAndOccName vf ran@(Range (Position startLine startColumn) (Position _endLine endColumn)) (Right name) - | nameLength name < fromIntegral (endColumn - startColumn), (Just prefixLen) <- peekPrefix vf ran = + | nameLength name < fromIntegral (endColumn - startColumn), (Just prefixLen) <- peekPrefixModuleNameLength vf ran = [(Range (Position startLine startColumn) (Position startLine (startColumn + fromIntegral prefixLen)) , Left (ModuleName $ mkFastString "")), -- we do not need the module name (Range (Position startLine (startColumn + fromIntegral prefixLen)) (Position startLine endColumn), Right name)] @@ -123,8 +122,8 @@ nameLength = lengthFS . occNameFS . nameOccName -- | peek at the prefix of a range, -- if it is a qualified name, return the length of the module name. -- module name everything before the last dot. -peekPrefix :: VirtualFile -> Range -> Maybe Int -peekPrefix rp ran = do +peekPrefixModuleNameLength :: VirtualFile -> Range -> Maybe Int +peekPrefixModuleNameLength rp ran = do token <- getTextByCodePointRangeFromVfs rp ran let prefixLen = length $ fst $ breakOnEnd "." 
token guard $ prefixLen > 0 From 445a8254f6a90a8a4d44b9b041245179f86f34a5 Mon Sep 17 00:00:00 2001 From: Patrick Date: Wed, 17 Jan 2024 18:53:57 +0800 Subject: [PATCH 09/74] fix module name --- .../src/Ide/Plugin/SemanticTokens/Utils.hs | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/plugins/hls-semantic-tokens-plugin/src/Ide/Plugin/SemanticTokens/Utils.hs b/plugins/hls-semantic-tokens-plugin/src/Ide/Plugin/SemanticTokens/Utils.hs index 301f588f63..64ab9c85c2 100644 --- a/plugins/hls-semantic-tokens-plugin/src/Ide/Plugin/SemanticTokens/Utils.hs +++ b/plugins/hls-semantic-tokens-plugin/src/Ide/Plugin/SemanticTokens/Utils.hs @@ -112,7 +112,7 @@ splitModuleNameAndOccName _ ran (Left m) = [(ran, Left m)] splitModuleNameAndOccName vf ran@(Range (Position startLine startColumn) (Position _endLine endColumn)) (Right name) | nameLength name < fromIntegral (endColumn - startColumn), (Just prefixLen) <- peekPrefixModuleNameLength vf ran = [(Range (Position startLine startColumn) (Position startLine (startColumn + fromIntegral prefixLen)) - , Left (ModuleName $ mkFastString "")), -- we do not need the module name + , Left (mkModuleName "")), -- we do not need the module name (Range (Position startLine (startColumn + fromIntegral prefixLen)) (Position startLine endColumn), Right name)] | otherwise = [(ran, Right name)] From 04d08a5ba4b21281e41f1c47c055a897d9a78900 Mon Sep 17 00:00:00 2001 From: Patrick Date: Wed, 17 Jan 2024 19:29:55 +0800 Subject: [PATCH 10/74] add test for qualified names --- plugins/hls-semantic-tokens-plugin/test/Main.hs | 3 ++- .../test/testdata/TQualifiedName.expected | 12 ++++++++++++ .../test/testdata/TQualifiedName.hs | 6 ++++++ 3 files changed, 20 insertions(+), 1 deletion(-) create mode 100644 plugins/hls-semantic-tokens-plugin/test/testdata/TQualifiedName.expected create mode 100644 plugins/hls-semantic-tokens-plugin/test/testdata/TQualifiedName.hs diff --git a/plugins/hls-semantic-tokens-plugin/test/Main.hs b/plugins/hls-semantic-tokens-plugin/test/Main.hs index 4e9bc68eb8..5cf15cc933 100644 --- a/plugins/hls-semantic-tokens-plugin/test/Main.hs +++ b/plugins/hls-semantic-tokens-plugin/test/Main.hs @@ -196,7 +196,8 @@ semanticTokensTests = goldenWithSemanticTokensWithDefaultConfig "mixed constancy test result generated from one ghc version" "T1", goldenWithSemanticTokensWithDefaultConfig "pattern bind" "TPatternSynonym", goldenWithSemanticTokensWithDefaultConfig "type family" "TTypefamily", - goldenWithSemanticTokensWithDefaultConfig "TUnicodeSyntax" "TUnicodeSyntax" + goldenWithSemanticTokensWithDefaultConfig "TUnicodeSyntax" "TUnicodeSyntax", + goldenWithSemanticTokensWithDefaultConfig "TQualifiedName" "TQualifiedName" ] semanticTokensDataTypeTests :: TestTree diff --git a/plugins/hls-semantic-tokens-plugin/test/testdata/TQualifiedName.expected b/plugins/hls-semantic-tokens-plugin/test/testdata/TQualifiedName.expected new file mode 100644 index 0000000000..faa5bdb1bd --- /dev/null +++ b/plugins/hls-semantic-tokens-plugin/test/testdata/TQualifiedName.expected @@ -0,0 +1,12 @@ +5:1-2 TVariable "a" +5:5-13 TModuleName "Prelude." +5:13-22 TVariable "undefined" +6:1-2 TVariable "b" +6:7-16 TModuleName "`Prelude." +6:16-21 TClassMethod "elem`" +7:1-2 TVariable "c" +7:5-14 TModuleName "(Prelude." 
+7:14-16 TClassMethod "+)" +8:1-2 TVariable "d" +8:5-11 TClassMethod "length" +8:12-13 TFunction "$" diff --git a/plugins/hls-semantic-tokens-plugin/test/testdata/TQualifiedName.hs b/plugins/hls-semantic-tokens-plugin/test/testdata/TQualifiedName.hs new file mode 100644 index 0000000000..ff6ac8dbec --- /dev/null +++ b/plugins/hls-semantic-tokens-plugin/test/testdata/TQualifiedName.hs @@ -0,0 +1,6 @@ +module TQualifiedName where + + +a = Prelude.undefined +b = 1 `Prelude.elem` [1, 2] +c = (Prelude.+) 1 1 From 2cca23dc2c0659a4605878c2eb6243f5f6bde98f Mon Sep 17 00:00:00 2001 From: Patrick Date: Wed, 17 Jan 2024 19:32:13 +0800 Subject: [PATCH 11/74] fix test --- .../test/testdata/TQualifiedName.expected | 21 ++++++++----------- 1 file changed, 9 insertions(+), 12 deletions(-) diff --git a/plugins/hls-semantic-tokens-plugin/test/testdata/TQualifiedName.expected b/plugins/hls-semantic-tokens-plugin/test/testdata/TQualifiedName.expected index faa5bdb1bd..fdeae91ba2 100644 --- a/plugins/hls-semantic-tokens-plugin/test/testdata/TQualifiedName.expected +++ b/plugins/hls-semantic-tokens-plugin/test/testdata/TQualifiedName.expected @@ -1,12 +1,9 @@ -5:1-2 TVariable "a" -5:5-13 TModuleName "Prelude." -5:13-22 TVariable "undefined" -6:1-2 TVariable "b" -6:7-16 TModuleName "`Prelude." -6:16-21 TClassMethod "elem`" -7:1-2 TVariable "c" -7:5-14 TModuleName "(Prelude." -7:14-16 TClassMethod "+)" -8:1-2 TVariable "d" -8:5-11 TClassMethod "length" -8:12-13 TFunction "$" +4:1-2 TVariable "a" +4:5-13 TModuleName "Prelude." +4:13-22 TVariable "undefined" +5:1-2 TVariable "b" +5:7-16 TModuleName "`Prelude." +5:16-21 TClassMethod "elem`" +6:1-2 TVariable "c" +6:5-14 TModuleName "(Prelude." +6:14-16 TClassMethod "+)" From c0bd67a3672eddc7be146bd97664a61124de7b7c Mon Sep 17 00:00:00 2001 From: Patrick Date: Wed, 17 Jan 2024 21:12:50 +0800 Subject: [PATCH 12/74] strip () --- .../src/Ide/Plugin/SemanticTokens/Utils.hs | 17 +++++++++++------ .../test/testdata/T1.expected | 4 ++-- .../test/testdata/TQualifiedName.expected | 8 ++++---- 3 files changed, 17 insertions(+), 12 deletions(-) diff --git a/plugins/hls-semantic-tokens-plugin/src/Ide/Plugin/SemanticTokens/Utils.hs b/plugins/hls-semantic-tokens-plugin/src/Ide/Plugin/SemanticTokens/Utils.hs index 64ab9c85c2..34f3283a6f 100644 --- a/plugins/hls-semantic-tokens-plugin/src/Ide/Plugin/SemanticTokens/Utils.hs +++ b/plugins/hls-semantic-tokens-plugin/src/Ide/Plugin/SemanticTokens/Utils.hs @@ -21,6 +21,8 @@ import qualified Data.Text.Utf16.Rope as Rope import Data.Text.Utf16.Rope (Rope, splitAtPosition) import Data.Text (breakOnEnd, length, Text) import Control.Monad (guard) +import qualified Data.Text as T +import Data.Bool (bool) deriving instance Show DeclType deriving instance Show BindType @@ -110,10 +112,11 @@ mkRange startLine startCol len = splitModuleNameAndOccName :: VirtualFile -> Range -> Identifier -> [(Range,Identifier)] splitModuleNameAndOccName _ ran (Left m) = [(ran, Left m)] splitModuleNameAndOccName vf ran@(Range (Position startLine startColumn) (Position _endLine endColumn)) (Right name) - | nameLength name < fromIntegral (endColumn - startColumn), (Just prefixLen) <- peekPrefixModuleNameLength vf ran = - [(Range (Position startLine startColumn) (Position startLine (startColumn + fromIntegral prefixLen)) - , Left (mkModuleName "")), -- we do not need the module name - (Range (Position startLine (startColumn + fromIntegral prefixLen)) (Position startLine endColumn), Right name)] + | nameLength name < fromIntegral (endColumn - startColumn), (Just 
(prefixLen, stripFlag)) <- peekPrefixModuleNameLength vf ran = + [(Range (Position startLine (startColumn + bool 0 1 stripFlag)) + (Position startLine (startColumn + fromIntegral prefixLen)) , Left (mkModuleName "")), -- we do not need the module name, only tis range + (Range (Position startLine (startColumn + fromIntegral prefixLen)) + (Position startLine (endColumn + bool 0 (-1) stripFlag)), Right name)] | otherwise = [(ran, Right name)] nameLength :: Name -> Int @@ -122,12 +125,14 @@ nameLength = lengthFS . occNameFS . nameOccName -- | peek at the prefix of a range, -- if it is a qualified name, return the length of the module name. -- module name everything before the last dot. -peekPrefixModuleNameLength :: VirtualFile -> Range -> Maybe Int +peekPrefixModuleNameLength :: VirtualFile -> Range -> Maybe (Int, Bool) peekPrefixModuleNameLength rp ran = do token <- getTextByCodePointRangeFromVfs rp ran + (c, _) <- T.uncons token let prefixLen = length $ fst $ breakOnEnd "." token guard $ prefixLen > 0 - return prefixLen + return (prefixLen, c `elem` strippedChars) + where strippedChars = ['`', '('] -- | get the text from a range in a virtual file getTextByCodePointRangeFromVfs :: VirtualFile -> Range -> Maybe Text diff --git a/plugins/hls-semantic-tokens-plugin/test/testdata/T1.expected b/plugins/hls-semantic-tokens-plugin/test/testdata/T1.expected index d3c9224b1a..408ed18812 100644 --- a/plugins/hls-semantic-tokens-plugin/test/testdata/T1.expected +++ b/plugins/hls-semantic-tokens-plugin/test/testdata/T1.expected @@ -73,8 +73,8 @@ 41:1-3 TFunction "go" 41:6-9 TRecordField "foo" 42:1-4 TFunction "add" -42:7-16 TModuleName "(Prelude." -42:16-18 TClassMethod "+)" +42:8-16 TModuleName "Prelude." +42:16-17 TClassMethod "+" 47:1-5 TVariable "main" 47:9-11 TTypeConstructor "IO" 48:1-5 TVariable "main" diff --git a/plugins/hls-semantic-tokens-plugin/test/testdata/TQualifiedName.expected b/plugins/hls-semantic-tokens-plugin/test/testdata/TQualifiedName.expected index fdeae91ba2..f9c4225980 100644 --- a/plugins/hls-semantic-tokens-plugin/test/testdata/TQualifiedName.expected +++ b/plugins/hls-semantic-tokens-plugin/test/testdata/TQualifiedName.expected @@ -2,8 +2,8 @@ 4:5-13 TModuleName "Prelude." 4:13-22 TVariable "undefined" 5:1-2 TVariable "b" -5:7-16 TModuleName "`Prelude." -5:16-21 TClassMethod "elem`" +5:8-16 TModuleName "Prelude." +5:16-20 TClassMethod "elem" 6:1-2 TVariable "c" -6:5-14 TModuleName "(Prelude." -6:14-16 TClassMethod "+)" +6:6-14 TModuleName "Prelude." 
+6:14-15 TClassMethod "+" From 7a68ccd24974449c66a7ccf64e0e1efacd6bba72 Mon Sep 17 00:00:00 2001 From: Patrick Date: Wed, 17 Jan 2024 21:33:19 +0800 Subject: [PATCH 13/74] stylish --- .../src/Ide/Plugin/SemanticTokens/Internal.hs | 6 +- .../src/Ide/Plugin/SemanticTokens/Query.hs | 71 ++++++++----------- .../src/Ide/Plugin/SemanticTokens/Utils.hs | 33 +++++---- 3 files changed, 50 insertions(+), 60 deletions(-) diff --git a/plugins/hls-semantic-tokens-plugin/src/Ide/Plugin/SemanticTokens/Internal.hs b/plugins/hls-semantic-tokens-plugin/src/Ide/Plugin/SemanticTokens/Internal.hs index 48d8ac63fa..7ff8f0c95f 100644 --- a/plugins/hls-semantic-tokens-plugin/src/Ide/Plugin/SemanticTokens/Internal.hs +++ b/plugins/hls-semantic-tokens-plugin/src/Ide/Plugin/SemanticTokens/Internal.hs @@ -1,13 +1,13 @@ {-# LANGUAGE DataKinds #-} {-# LANGUAGE DerivingStrategies #-} -{-# LANGUAGE FlexibleContexts #-} -{-# LANGUAGE PolyKinds #-} +{-# LANGUAGE FlexibleContexts #-} {-# LANGUAGE FlexibleInstances #-} {-# LANGUAGE MultiParamTypeClasses #-} {-# LANGUAGE NamedFieldPuns #-} {-# LANGUAGE OverloadedLabels #-} +{-# LANGUAGE OverloadedRecordDot #-} {-# LANGUAGE OverloadedStrings #-} -{-# LANGUAGE OverloadedRecordDot #-} +{-# LANGUAGE PolyKinds #-} {-# LANGUAGE RecordWildCards #-} {-# LANGUAGE ScopedTypeVariables #-} {-# LANGUAGE TemplateHaskell #-} diff --git a/plugins/hls-semantic-tokens-plugin/src/Ide/Plugin/SemanticTokens/Query.hs b/plugins/hls-semantic-tokens-plugin/src/Ide/Plugin/SemanticTokens/Query.hs index e79657af97..8ca9fb3136 100644 --- a/plugins/hls-semantic-tokens-plugin/src/Ide/Plugin/SemanticTokens/Query.hs +++ b/plugins/hls-semantic-tokens-plugin/src/Ide/Plugin/SemanticTokens/Query.hs @@ -1,49 +1,40 @@ -{-# LANGUAGE ExplicitNamespaces #-} +{-# LANGUAGE ExplicitNamespaces #-} {-# LANGUAGE OverloadedRecordDot #-} -{-# LANGUAGE OverloadedStrings #-} -{-# LANGUAGE RankNTypes #-} +{-# LANGUAGE OverloadedStrings #-} +{-# LANGUAGE RankNTypes #-} {-# LANGUAGE ScopedTypeVariables #-} -- | -- The query module is used to query the semantic tokens from the AST module Ide.Plugin.SemanticTokens.Query where -import Data.Foldable (fold) -import qualified Data.Map as M -import qualified Data.Map as Map -import Data.Maybe - ( fromMaybe, - listToMaybe, - mapMaybe, - ) -import Data.Set (Set) -import qualified Data.Set as S -import qualified Data.Set as Set -import Data.Text (Text) -import Development.IDE.Core.PositionMapping - ( PositionMapping, - toCurrentRange, - ) -import Development.IDE.GHC.Compat -import Development.IDE.GHC.Error (realSrcSpanToCodePointRange) -import Ide.Plugin.SemanticTokens.Mappings -import Ide.Plugin.SemanticTokens.Types - ( HieFunMaskKind, - HsSemanticTokenType (TModuleName), - NameSemanticMap, - SemanticTokensConfig, - ) -import Ide.Plugin.SemanticTokens.Utils (splitModuleNameAndOccName) -import Language.LSP.Protocol.Types - ( Position (Position), - Range (Range), - SemanticTokenAbsolute (SemanticTokenAbsolute), - SemanticTokens, - defaultSemanticTokensLegend, - makeSemanticTokens, - ) -import Language.LSP.VFS -import Prelude hiding (length, span) +import Data.Foldable (fold) +import qualified Data.Map as M +import qualified Data.Map as Map +import Data.Maybe (fromMaybe, listToMaybe, + mapMaybe) +import Data.Set (Set) +import qualified Data.Set as S +import qualified Data.Set as Set +import Data.Text (Text) +import Development.IDE.Core.PositionMapping (PositionMapping, + toCurrentRange) +import Development.IDE.GHC.Compat +import Development.IDE.GHC.Error (realSrcSpanToCodePointRange) +import 
Ide.Plugin.SemanticTokens.Mappings +import Ide.Plugin.SemanticTokens.Types (HieFunMaskKind, + HsSemanticTokenType (TModuleName), + NameSemanticMap, + SemanticTokensConfig) +import Ide.Plugin.SemanticTokens.Utils (splitModuleNameAndOccName) +import Language.LSP.Protocol.Types (Position (Position), + Range (Range), + SemanticTokenAbsolute (SemanticTokenAbsolute), + SemanticTokens, + defaultSemanticTokensLegend, + makeSemanticTokens) +import Language.LSP.VFS +import Prelude hiding (length, span) --------------------------------------------------------- @@ -104,7 +95,7 @@ hieAstSpanIdentifiers vf ast = inclusion a b = not $ exclusion a b exclusion :: Identifier -> IdentifierDetails a -> Bool exclusion idt IdentifierDetails {identInfo = infos} = case idt of - Left _ -> False + Left _ -> False Right _ -> any isEvidenceContext (S.toList infos) diff --git a/plugins/hls-semantic-tokens-plugin/src/Ide/Plugin/SemanticTokens/Utils.hs b/plugins/hls-semantic-tokens-plugin/src/Ide/Plugin/SemanticTokens/Utils.hs index 34f3283a6f..17023345d0 100644 --- a/plugins/hls-semantic-tokens-plugin/src/Ide/Plugin/SemanticTokens/Utils.hs +++ b/plugins/hls-semantic-tokens-plugin/src/Ide/Plugin/SemanticTokens/Utils.hs @@ -1,28 +1,27 @@ -{-# LANGUAGE StandaloneDeriving #-} -{-# LANGUAGE OverloadedStrings #-} {-# LANGUAGE OverloadedRecordDot #-} -{-# LANGUAGE TypeFamilies #-} -{-# LANGUAGE PatternGuards #-} -{-# LANGUAGE ViewPatterns #-} +{-# LANGUAGE OverloadedStrings #-} +{-# LANGUAGE PatternGuards #-} +{-# LANGUAGE StandaloneDeriving #-} +{-# LANGUAGE TypeFamilies #-} {-# OPTIONS_GHC -Wno-orphans #-} module Ide.Plugin.SemanticTokens.Utils where -import Data.ByteString (ByteString) -import Data.ByteString.Char8 (unpack) -import qualified Data.Map as Map -import Development.IDE (Position (..), Range (..)) +import Control.Monad (guard) +import Data.Bool (bool) +import Data.ByteString (ByteString) +import Data.ByteString.Char8 (unpack) +import qualified Data.Map as Map +import Data.Text (Text, breakOnEnd, length) +import qualified Data.Text as T +import Data.Text.Utf16.Rope (Rope, splitAtPosition) +import qualified Data.Text.Utf16.Rope as Rope +import Development.IDE (Position (..), Range (..)) import Development.IDE.GHC.Compat +import Development.IDE.GHC.Compat.Util (mkFastString) +import Language.LSP.VFS (VirtualFile, _file_text) import Prelude hiding (length, span) -import Development.IDE.GHC.Compat.Util (mkFastString) -import Language.LSP.VFS (_file_text, VirtualFile) -import qualified Data.Text.Utf16.Rope as Rope -import Data.Text.Utf16.Rope (Rope, splitAtPosition) -import Data.Text (breakOnEnd, length, Text) -import Control.Monad (guard) -import qualified Data.Text as T -import Data.Bool (bool) deriving instance Show DeclType deriving instance Show BindType From 9758193236e69a709db3b3b33a2eb062b477332f Mon Sep 17 00:00:00 2001 From: Patrick Date: Thu, 18 Jan 2024 00:16:15 +0800 Subject: [PATCH 14/74] remove wrap '' () from tokens and add test --- .../src/Ide/Plugin/SemanticTokens/Utils.hs | 32 ++++++++++++------- .../TInstanceClassMethodBind.expected | 2 +- .../testdata/TInstanceClassMethodUse.expected | 2 +- .../test/testdata/TQualifiedName.expected | 2 ++ .../test/testdata/TQualifiedName.hs | 1 + 5 files changed, 26 insertions(+), 13 deletions(-) diff --git a/plugins/hls-semantic-tokens-plugin/src/Ide/Plugin/SemanticTokens/Utils.hs b/plugins/hls-semantic-tokens-plugin/src/Ide/Plugin/SemanticTokens/Utils.hs index 17023345d0..961e505473 100644 --- 
a/plugins/hls-semantic-tokens-plugin/src/Ide/Plugin/SemanticTokens/Utils.hs +++ b/plugins/hls-semantic-tokens-plugin/src/Ide/Plugin/SemanticTokens/Utils.hs @@ -111,27 +111,37 @@ mkRange startLine startCol len = splitModuleNameAndOccName :: VirtualFile -> Range -> Identifier -> [(Range,Identifier)] splitModuleNameAndOccName _ ran (Left m) = [(ran, Left m)] splitModuleNameAndOccName vf ran@(Range (Position startLine startColumn) (Position _endLine endColumn)) (Right name) - | nameLength name < fromIntegral (endColumn - startColumn), (Just (prefixLen, stripFlag)) <- peekPrefixModuleNameLength vf ran = - [(Range (Position startLine (startColumn + bool 0 1 stripFlag)) - (Position startLine (startColumn + fromIntegral prefixLen)) , Left (mkModuleName "")), -- we do not need the module name, only tis range - (Range (Position startLine (startColumn + fromIntegral prefixLen)) - (Position startLine (endColumn + bool 0 (-1) stripFlag)), Right name)] + | nameLength name < fromIntegral (endColumn - startColumn), (Just text) <- getTextByCodePointRangeFromVfs vf ran = + let stripFlag = peekStripFlag text + in case peekPrefixModuleNameLength text of + Just prefixLen -> + [(Range (Position startLine (startColumn + bool 0 1 stripFlag)) + (Position startLine (startColumn + fromIntegral prefixLen)) , Left (mkModuleName "")), -- we do not need the module name, only tis range + (Range (Position startLine (startColumn + fromIntegral prefixLen)) + (Position startLine (endColumn + bool 0 (-1) stripFlag)), Right name)] + Nothing -> if stripFlag + then [(Range (Position startLine (startColumn+1)) (Position _endLine (endColumn-1)), Right name)] + else [(ran, Right name)] | otherwise = [(ran, Right name)] nameLength :: Name -> Int nameLength = lengthFS . occNameFS . nameOccName +peekStripFlag :: Text -> Bool +peekStripFlag token = + case T.uncons token of + Just (c, _) -> c `elem` strippedChars + Nothing -> False + where strippedChars = ['`', '('] + -- | peek at the prefix of a range, -- if it is a qualified name, return the length of the module name. -- module name everything before the last dot. -peekPrefixModuleNameLength :: VirtualFile -> Range -> Maybe (Int, Bool) -peekPrefixModuleNameLength rp ran = do - token <- getTextByCodePointRangeFromVfs rp ran - (c, _) <- T.uncons token +peekPrefixModuleNameLength :: Text -> Maybe Int +peekPrefixModuleNameLength token = do let prefixLen = length $ fst $ breakOnEnd "." 
token guard $ prefixLen > 0 - return (prefixLen, c `elem` strippedChars) - where strippedChars = ['`', '('] + return prefixLen -- | get the text from a range in a virtual file getTextByCodePointRangeFromVfs :: VirtualFile -> Range -> Maybe Text diff --git a/plugins/hls-semantic-tokens-plugin/test/testdata/TInstanceClassMethodBind.expected b/plugins/hls-semantic-tokens-plugin/test/testdata/TInstanceClassMethodBind.expected index a1392ff1d9..9468da2fc0 100644 --- a/plugins/hls-semantic-tokens-plugin/test/testdata/TInstanceClassMethodBind.expected +++ b/plugins/hls-semantic-tokens-plugin/test/testdata/TInstanceClassMethodBind.expected @@ -3,5 +3,5 @@ 4:16-19 TTypeConstructor "Int" 5:10-12 TClass "Eq" 5:13-16 TTypeConstructor "Foo" -6:5-9 TClassMethod "(==)" +6:6-8 TClassMethod "==" 6:12-21 TVariable "undefined" diff --git a/plugins/hls-semantic-tokens-plugin/test/testdata/TInstanceClassMethodUse.expected b/plugins/hls-semantic-tokens-plugin/test/testdata/TInstanceClassMethodUse.expected index 36e41ff096..e55735f77a 100644 --- a/plugins/hls-semantic-tokens-plugin/test/testdata/TInstanceClassMethodUse.expected +++ b/plugins/hls-semantic-tokens-plugin/test/testdata/TInstanceClassMethodUse.expected @@ -1,2 +1,2 @@ 4:1-3 TFunction "go" -4:9-13 TClassMethod "(==)" +4:10-12 TClassMethod "==" diff --git a/plugins/hls-semantic-tokens-plugin/test/testdata/TQualifiedName.expected b/plugins/hls-semantic-tokens-plugin/test/testdata/TQualifiedName.expected index f9c4225980..524f9ccbe6 100644 --- a/plugins/hls-semantic-tokens-plugin/test/testdata/TQualifiedName.expected +++ b/plugins/hls-semantic-tokens-plugin/test/testdata/TQualifiedName.expected @@ -7,3 +7,5 @@ 6:1-2 TVariable "c" 6:6-14 TModuleName "Prelude." 6:14-15 TClassMethod "+" +7:1-2 TVariable "d" +7:6-7 TClassMethod "+" diff --git a/plugins/hls-semantic-tokens-plugin/test/testdata/TQualifiedName.hs b/plugins/hls-semantic-tokens-plugin/test/testdata/TQualifiedName.hs index ff6ac8dbec..60e85af351 100644 --- a/plugins/hls-semantic-tokens-plugin/test/testdata/TQualifiedName.hs +++ b/plugins/hls-semantic-tokens-plugin/test/testdata/TQualifiedName.hs @@ -4,3 +4,4 @@ module TQualifiedName where a = Prelude.undefined b = 1 `Prelude.elem` [1, 2] c = (Prelude.+) 1 1 +d = (+) 1 1 From 8d27484c06906c0d323c73b662943b9104c0d0d3 Mon Sep 17 00:00:00 2001 From: Patrick Date: Thu, 18 Jan 2024 06:01:37 +0800 Subject: [PATCH 15/74] fix doc and rename --- .../src/Ide/Plugin/SemanticTokens/Query.hs | 4 +-- .../Plugin/SemanticTokens/SemanticConfig.hs | 2 +- .../src/Ide/Plugin/SemanticTokens/Types.hs | 2 +- .../src/Ide/Plugin/SemanticTokens/Utils.hs | 31 ++++++++++--------- 4 files changed, 21 insertions(+), 18 deletions(-) diff --git a/plugins/hls-semantic-tokens-plugin/src/Ide/Plugin/SemanticTokens/Query.hs b/plugins/hls-semantic-tokens-plugin/src/Ide/Plugin/SemanticTokens/Query.hs index 8ca9fb3136..c82c10083f 100644 --- a/plugins/hls-semantic-tokens-plugin/src/Ide/Plugin/SemanticTokens/Query.hs +++ b/plugins/hls-semantic-tokens-plugin/src/Ide/Plugin/SemanticTokens/Query.hs @@ -26,7 +26,7 @@ import Ide.Plugin.SemanticTokens.Types (HieFunMaskKind, HsSemanticTokenType (TModuleName), NameSemanticMap, SemanticTokensConfig) -import Ide.Plugin.SemanticTokens.Utils (splitModuleNameAndOccName) +import Ide.Plugin.SemanticTokens.Utils (splitAndBreakModuleNameAndOccName) import Language.LSP.Protocol.Types (Position (Position), Range (Range), SemanticTokenAbsolute (SemanticTokenAbsolute), @@ -81,7 +81,7 @@ hieAstSpanIdentifiers vf ast = return $ M.fromListWith (<>) - [S.singleton 
<$> ri | idt <- S.toList (getNodeIds' ast'), ri <- splitModuleNameAndOccName vf range idt] + [S.singleton <$> ri | idt <- S.toList (getNodeIds' ast'), ri <- splitAndBreakModuleNameAndOccName vf range idt] getNodeIds' = Map.foldl' combineNodeIds mempty . Map.filterWithKey (\k _ -> k == SourceInfo) diff --git a/plugins/hls-semantic-tokens-plugin/src/Ide/Plugin/SemanticTokens/SemanticConfig.hs b/plugins/hls-semantic-tokens-plugin/src/Ide/Plugin/SemanticTokens/SemanticConfig.hs index 21574bf2c1..a15178befb 100644 --- a/plugins/hls-semantic-tokens-plugin/src/Ide/Plugin/SemanticTokens/SemanticConfig.hs +++ b/plugins/hls-semantic-tokens-plugin/src/Ide/Plugin/SemanticTokens/SemanticConfig.hs @@ -34,7 +34,7 @@ docName tt = case tt of TTypeSynonym -> "type synonyms" TTypeFamily -> "type families" TRecordField -> "record fields" - TModuleName -> "module names" + TModuleName -> "modules" toConfigName :: String -> String toConfigName = ("st" <>) diff --git a/plugins/hls-semantic-tokens-plugin/src/Ide/Plugin/SemanticTokens/Types.hs b/plugins/hls-semantic-tokens-plugin/src/Ide/Plugin/SemanticTokens/Types.hs index 8bfbc23156..47b0482973 100644 --- a/plugins/hls-semantic-tokens-plugin/src/Ide/Plugin/SemanticTokens/Types.hs +++ b/plugins/hls-semantic-tokens-plugin/src/Ide/Plugin/SemanticTokens/Types.hs @@ -44,7 +44,7 @@ data HsSemanticTokenType | TTypeSynonym -- Type synonym | TTypeFamily -- type family | TRecordField -- from match bind - | TModuleName -- from match bind + | TModuleName -- module name deriving (Eq, Ord, Show, Enum, Bounded, Generic, Lift) diff --git a/plugins/hls-semantic-tokens-plugin/src/Ide/Plugin/SemanticTokens/Utils.hs b/plugins/hls-semantic-tokens-plugin/src/Ide/Plugin/SemanticTokens/Utils.hs index 961e505473..366e17330d 100644 --- a/plugins/hls-semantic-tokens-plugin/src/Ide/Plugin/SemanticTokens/Utils.hs +++ b/plugins/hls-semantic-tokens-plugin/src/Ide/Plugin/SemanticTokens/Utils.hs @@ -103,17 +103,20 @@ mkRange startLine startCol len = Range (Position (fromIntegral startLine) (fromIntegral startCol)) (Position (fromIntegral startLine) (fromIntegral $ startCol + len)) --- nameLength is in code points unit. --- while Range might not in code points unit. --- but we can still use it to get the length --- since it is only used to exclude some names --- currently, we only break `(ModuleA.b)` into `(ModuleA` and `.b)` -splitModuleNameAndOccName :: VirtualFile -> Range -> Identifier -> [(Range,Identifier)] -splitModuleNameAndOccName _ ran (Left m) = [(ran, Left m)] -splitModuleNameAndOccName vf ran@(Range (Position startLine startColumn) (Position _endLine endColumn)) (Right name) - | nameLength name < fromIntegral (endColumn - startColumn), (Just text) <- getTextByCodePointRangeFromVfs vf ran = - let stripFlag = peekStripFlag text - in case peekPrefixModuleNameLength text of +-- | split a qualified identifier into module name and identifier and/or strip the (), `` +-- for `ModuleA.b`, break it into `ModuleA.` and `b` +-- for `(b)`, strip `()`, and get `b` +-- for `(ModuleA.b)`, strip `()` and break it into `ModuleA.` and `b` +-- nameLength get the length of the `b` in code points unit +-- while Range might not be in code points unit. 
+-- but the comparison is still valid since we only want to know if it is potentially a qualified identifier +-- or an identifier that is wrapped in () or `` +splitAndBreakModuleNameAndOccName :: VirtualFile -> Range -> Identifier -> [(Range,Identifier)] +splitAndBreakModuleNameAndOccName _ ran (Left m) = [(ran, Left m)] +splitAndBreakModuleNameAndOccName vf ran@(Range (Position startLine startColumn) (Position _endLine endColumn)) (Right name) + | nameLength name < fromIntegral (endColumn - startColumn), (Just txt) <- getTextByCodePointRangeFromVfs vf ran = + let stripFlag = peekStripFlag txt + in case peekPrefixModuleNameLength txt of Just prefixLen -> [(Range (Position startLine (startColumn + bool 0 1 stripFlag)) (Position startLine (startColumn + fromIntegral prefixLen)) , Left (mkModuleName "")), -- we do not need the module name, only tis range @@ -139,9 +142,9 @@ peekStripFlag token = -- module name everything before the last dot. peekPrefixModuleNameLength :: Text -> Maybe Int peekPrefixModuleNameLength token = do - let prefixLen = length $ fst $ breakOnEnd "." token - guard $ prefixLen > 0 - return prefixLen + let prefixLen = length $ fst $ breakOnEnd "." token + guard $ prefixLen > 0 + return prefixLen -- | get the text from a range in a virtual file getTextByCodePointRangeFromVfs :: VirtualFile -> Range -> Maybe Text From 6e69acb4e667e6e3a389f748ee6ced4b6ef6d76f Mon Sep 17 00:00:00 2001 From: Patrick Date: Thu, 18 Jan 2024 06:11:32 +0800 Subject: [PATCH 16/74] improve test TQualifiedName --- .../test/testdata/TQualifiedName.expected | 23 ++++++++++--------- .../test/testdata/TQualifiedName.hs | 4 +++- 2 files changed, 15 insertions(+), 12 deletions(-) diff --git a/plugins/hls-semantic-tokens-plugin/test/testdata/TQualifiedName.expected b/plugins/hls-semantic-tokens-plugin/test/testdata/TQualifiedName.expected index 524f9ccbe6..771756f1eb 100644 --- a/plugins/hls-semantic-tokens-plugin/test/testdata/TQualifiedName.expected +++ b/plugins/hls-semantic-tokens-plugin/test/testdata/TQualifiedName.expected @@ -1,11 +1,12 @@ -4:1-2 TVariable "a" -4:5-13 TModuleName "Prelude." -4:13-22 TVariable "undefined" -5:1-2 TVariable "b" -5:8-16 TModuleName "Prelude." -5:16-20 TClassMethod "elem" -6:1-2 TVariable "c" -6:6-14 TModuleName "Prelude." -6:14-15 TClassMethod "+" -7:1-2 TVariable "d" -7:6-7 TClassMethod "+" +3:18-27 TModuleName "Data.List" +6:1-2 TVariable "a" +6:5-13 TModuleName "Prelude." +6:13-22 TVariable "undefined" +7:1-2 TVariable "b" +7:8-18 TModuleName "Data.List." +7:18-22 TClassMethod "elem" +8:1-2 TVariable "c" +8:6-14 TModuleName "Prelude." 
+8:14-15 TClassMethod "+" +9:1-2 TVariable "d" +9:6-7 TClassMethod "+" diff --git a/plugins/hls-semantic-tokens-plugin/test/testdata/TQualifiedName.hs b/plugins/hls-semantic-tokens-plugin/test/testdata/TQualifiedName.hs index 60e85af351..5dbdcc1d52 100644 --- a/plugins/hls-semantic-tokens-plugin/test/testdata/TQualifiedName.hs +++ b/plugins/hls-semantic-tokens-plugin/test/testdata/TQualifiedName.hs @@ -1,7 +1,9 @@ module TQualifiedName where +import qualified Data.List + a = Prelude.undefined -b = 1 `Prelude.elem` [1, 2] +b = 1 `Data.List.elem` [1, 2] c = (Prelude.+) 1 1 d = (+) 1 1 From 10d92e6e0f72787e3968a4940bd39b789ef29bf7 Mon Sep 17 00:00:00 2001 From: Patrick Date: Thu, 18 Jan 2024 06:15:29 +0800 Subject: [PATCH 17/74] rename --- .../src/Ide/Plugin/SemanticTokens/Query.hs | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/plugins/hls-semantic-tokens-plugin/src/Ide/Plugin/SemanticTokens/Query.hs b/plugins/hls-semantic-tokens-plugin/src/Ide/Plugin/SemanticTokens/Query.hs index c82c10083f..49d6d6638b 100644 --- a/plugins/hls-semantic-tokens-plugin/src/Ide/Plugin/SemanticTokens/Query.hs +++ b/plugins/hls-semantic-tokens-plugin/src/Ide/Plugin/SemanticTokens/Query.hs @@ -95,8 +95,8 @@ hieAstSpanIdentifiers vf ast = inclusion a b = not $ exclusion a b exclusion :: Identifier -> IdentifierDetails a -> Bool exclusion idt IdentifierDetails {identInfo = infos} = case idt of - Left _ -> False - Right _ -> any isEvidenceContext (S.toList infos) + Left _moduleName -> False + Right _name -> any isEvidenceContext (S.toList infos) ------------------------------------------------- From 8fe342ece8bd3fd0eaccbb9cfb1bad124ba451d4 Mon Sep 17 00:00:00 2001 From: Patrick Date: Thu, 18 Jan 2024 06:20:32 +0800 Subject: [PATCH 18/74] add doc --- .../src/Ide/Plugin/SemanticTokens/Query.hs | 2 ++ 1 file changed, 2 insertions(+) diff --git a/plugins/hls-semantic-tokens-plugin/src/Ide/Plugin/SemanticTokens/Query.hs b/plugins/hls-semantic-tokens-plugin/src/Ide/Plugin/SemanticTokens/Query.hs index 49d6d6638b..c29f3d3839 100644 --- a/plugins/hls-semantic-tokens-plugin/src/Ide/Plugin/SemanticTokens/Query.hs +++ b/plugins/hls-semantic-tokens-plugin/src/Ide/Plugin/SemanticTokens/Query.hs @@ -96,6 +96,8 @@ hieAstSpanIdentifiers vf ast = exclusion :: Identifier -> IdentifierDetails a -> Bool exclusion idt IdentifierDetails {identInfo = infos} = case idt of Left _moduleName -> False + -- filter out the evidence names since they are visible, + -- derived names are not filtered out since they are not visible at use site. 
Right _name -> any isEvidenceContext (S.toList infos) From 481a404635457c46946373f5e25d51ebc80d10f0 Mon Sep 17 00:00:00 2001 From: Patrick Date: Thu, 18 Jan 2024 06:26:51 +0800 Subject: [PATCH 19/74] fix test --- test/testdata/schema/ghc92/default-config.golden.json | 2 +- test/testdata/schema/ghc92/vscode-extension-schema.golden.json | 2 +- test/testdata/schema/ghc94/default-config.golden.json | 2 +- test/testdata/schema/ghc94/vscode-extension-schema.golden.json | 2 +- test/testdata/schema/ghc96/default-config.golden.json | 2 +- test/testdata/schema/ghc96/vscode-extension-schema.golden.json | 2 +- test/testdata/schema/ghc98/default-config.golden.json | 2 +- test/testdata/schema/ghc98/vscode-extension-schema.golden.json | 2 +- 8 files changed, 8 insertions(+), 8 deletions(-) diff --git a/test/testdata/schema/ghc92/default-config.golden.json b/test/testdata/schema/ghc92/default-config.golden.json index a5cbc48064..58efc471fe 100644 --- a/test/testdata/schema/ghc92/default-config.golden.json +++ b/test/testdata/schema/ghc92/default-config.golden.json @@ -122,7 +122,7 @@ "classToken": "class", "dataConstructorToken": "enumMember", "functionToken": "function", - "moduleNameToken": "namespace", + "moduleName": "namespace", "patternSynonymToken": "macro", "recordFieldToken": "property", "typeConstructorToken": "enum", diff --git a/test/testdata/schema/ghc92/vscode-extension-schema.golden.json b/test/testdata/schema/ghc92/vscode-extension-schema.golden.json index f8d84cbea9..7d3a6125b8 100644 --- a/test/testdata/schema/ghc92/vscode-extension-schema.golden.json +++ b/test/testdata/schema/ghc92/vscode-extension-schema.golden.json @@ -485,7 +485,7 @@ "scope": "resource", "type": "string" }, - "haskell.plugin.semanticTokens.config.moduleNameToken": { + "haskell.plugin.semanticTokens.config.module": { "default": "namespace", "description": "LSP semantic token type to use for module names", "enum": [ diff --git a/test/testdata/schema/ghc94/default-config.golden.json b/test/testdata/schema/ghc94/default-config.golden.json index 15c28e5c6c..f550b4cc37 100644 --- a/test/testdata/schema/ghc94/default-config.golden.json +++ b/test/testdata/schema/ghc94/default-config.golden.json @@ -122,7 +122,7 @@ "classToken": "class", "dataConstructorToken": "enumMember", "functionToken": "function", - "moduleNameToken": "namespace", + "moduleName": "namespace", "patternSynonymToken": "macro", "recordFieldToken": "property", "typeConstructorToken": "enum", diff --git a/test/testdata/schema/ghc94/vscode-extension-schema.golden.json b/test/testdata/schema/ghc94/vscode-extension-schema.golden.json index 6ee77630b8..fb80e809ab 100644 --- a/test/testdata/schema/ghc94/vscode-extension-schema.golden.json +++ b/test/testdata/schema/ghc94/vscode-extension-schema.golden.json @@ -485,7 +485,7 @@ "scope": "resource", "type": "string" }, - "haskell.plugin.semanticTokens.config.moduleNameToken": { + "haskell.plugin.semanticTokens.config.module": { "default": "namespace", "description": "LSP semantic token type to use for module names", "enum": [ diff --git a/test/testdata/schema/ghc96/default-config.golden.json b/test/testdata/schema/ghc96/default-config.golden.json index 15c28e5c6c..f550b4cc37 100644 --- a/test/testdata/schema/ghc96/default-config.golden.json +++ b/test/testdata/schema/ghc96/default-config.golden.json @@ -122,7 +122,7 @@ "classToken": "class", "dataConstructorToken": "enumMember", "functionToken": "function", - "moduleNameToken": "namespace", + "moduleName": "namespace", "patternSynonymToken": "macro", 
"recordFieldToken": "property", "typeConstructorToken": "enum", diff --git a/test/testdata/schema/ghc96/vscode-extension-schema.golden.json b/test/testdata/schema/ghc96/vscode-extension-schema.golden.json index 6ee77630b8..fb80e809ab 100644 --- a/test/testdata/schema/ghc96/vscode-extension-schema.golden.json +++ b/test/testdata/schema/ghc96/vscode-extension-schema.golden.json @@ -485,7 +485,7 @@ "scope": "resource", "type": "string" }, - "haskell.plugin.semanticTokens.config.moduleNameToken": { + "haskell.plugin.semanticTokens.config.module": { "default": "namespace", "description": "LSP semantic token type to use for module names", "enum": [ diff --git a/test/testdata/schema/ghc98/default-config.golden.json b/test/testdata/schema/ghc98/default-config.golden.json index fe1725f4fb..2a57a82ac9 100644 --- a/test/testdata/schema/ghc98/default-config.golden.json +++ b/test/testdata/schema/ghc98/default-config.golden.json @@ -115,7 +115,7 @@ "classToken": "class", "dataConstructorToken": "enumMember", "functionToken": "function", - "moduleNameToken": "namespace", + "moduleName": "namespace", "patternSynonymToken": "macro", "recordFieldToken": "property", "typeConstructorToken": "enum", diff --git a/test/testdata/schema/ghc98/vscode-extension-schema.golden.json b/test/testdata/schema/ghc98/vscode-extension-schema.golden.json index 8703df1a2b..81b3ddee7d 100644 --- a/test/testdata/schema/ghc98/vscode-extension-schema.golden.json +++ b/test/testdata/schema/ghc98/vscode-extension-schema.golden.json @@ -467,7 +467,7 @@ "scope": "resource", "type": "string" }, - "haskell.plugin.semanticTokens.config.moduleNameToken": { + "haskell.plugin.semanticTokens.config.module": { "default": "namespace", "description": "LSP semantic token type to use for module names", "enum": [ From 16f7086a7edbe342842edf110c3f487173afccf4 Mon Sep 17 00:00:00 2001 From: Patrick Date: Thu, 18 Jan 2024 06:31:20 +0800 Subject: [PATCH 20/74] fix test --- test/testdata/schema/ghc92/default-config.golden.json | 4 ++-- .../testdata/schema/ghc92/vscode-extension-schema.golden.json | 4 ++-- test/testdata/schema/ghc94/default-config.golden.json | 4 ++-- .../testdata/schema/ghc94/vscode-extension-schema.golden.json | 4 ++-- test/testdata/schema/ghc96/default-config.golden.json | 4 ++-- .../testdata/schema/ghc96/vscode-extension-schema.golden.json | 4 ++-- test/testdata/schema/ghc98/default-config.golden.json | 4 ++-- .../testdata/schema/ghc98/vscode-extension-schema.golden.json | 4 ++-- 8 files changed, 16 insertions(+), 16 deletions(-) diff --git a/test/testdata/schema/ghc92/default-config.golden.json b/test/testdata/schema/ghc92/default-config.golden.json index 58efc471fe..0a0de12313 100644 --- a/test/testdata/schema/ghc92/default-config.golden.json +++ b/test/testdata/schema/ghc92/default-config.golden.json @@ -84,7 +84,7 @@ "codeActionsOn": true, "codeLensOn": true }, - "moduleName": { + "moduleToken": { "globalOn": true }, "ormolu": { @@ -122,7 +122,7 @@ "classToken": "class", "dataConstructorToken": "enumMember", "functionToken": "function", - "moduleName": "namespace", + "moduleToken": "namespace", "patternSynonymToken": "macro", "recordFieldToken": "property", "typeConstructorToken": "enum", diff --git a/test/testdata/schema/ghc92/vscode-extension-schema.golden.json b/test/testdata/schema/ghc92/vscode-extension-schema.golden.json index 7d3a6125b8..fcff330b84 100644 --- a/test/testdata/schema/ghc92/vscode-extension-schema.golden.json +++ b/test/testdata/schema/ghc92/vscode-extension-schema.golden.json @@ -485,9 +485,9 @@ "scope": 
"resource", "type": "string" }, - "haskell.plugin.semanticTokens.config.module": { + "haskell.plugin.semanticTokens.config.moduleToken": { "default": "namespace", - "description": "LSP semantic token type to use for module names", + "description": "LSP semantic token type to use for modules", "enum": [ "namespace", "type", diff --git a/test/testdata/schema/ghc94/default-config.golden.json b/test/testdata/schema/ghc94/default-config.golden.json index f550b4cc37..57111c2d2e 100644 --- a/test/testdata/schema/ghc94/default-config.golden.json +++ b/test/testdata/schema/ghc94/default-config.golden.json @@ -84,7 +84,7 @@ "codeActionsOn": true, "codeLensOn": true }, - "moduleName": { + "moduleToken": { "globalOn": true }, "ormolu": { @@ -122,7 +122,7 @@ "classToken": "class", "dataConstructorToken": "enumMember", "functionToken": "function", - "moduleName": "namespace", + "moduleToken": "namespace", "patternSynonymToken": "macro", "recordFieldToken": "property", "typeConstructorToken": "enum", diff --git a/test/testdata/schema/ghc94/vscode-extension-schema.golden.json b/test/testdata/schema/ghc94/vscode-extension-schema.golden.json index fb80e809ab..73ed5b0855 100644 --- a/test/testdata/schema/ghc94/vscode-extension-schema.golden.json +++ b/test/testdata/schema/ghc94/vscode-extension-schema.golden.json @@ -485,9 +485,9 @@ "scope": "resource", "type": "string" }, - "haskell.plugin.semanticTokens.config.module": { + "haskell.plugin.semanticTokens.config.moduleToken": { "default": "namespace", - "description": "LSP semantic token type to use for module names", + "description": "LSP semantic token type to use for modules", "enum": [ "namespace", "type", diff --git a/test/testdata/schema/ghc96/default-config.golden.json b/test/testdata/schema/ghc96/default-config.golden.json index f550b4cc37..57111c2d2e 100644 --- a/test/testdata/schema/ghc96/default-config.golden.json +++ b/test/testdata/schema/ghc96/default-config.golden.json @@ -84,7 +84,7 @@ "codeActionsOn": true, "codeLensOn": true }, - "moduleName": { + "moduleToken": { "globalOn": true }, "ormolu": { @@ -122,7 +122,7 @@ "classToken": "class", "dataConstructorToken": "enumMember", "functionToken": "function", - "moduleName": "namespace", + "moduleToken": "namespace", "patternSynonymToken": "macro", "recordFieldToken": "property", "typeConstructorToken": "enum", diff --git a/test/testdata/schema/ghc96/vscode-extension-schema.golden.json b/test/testdata/schema/ghc96/vscode-extension-schema.golden.json index fb80e809ab..73ed5b0855 100644 --- a/test/testdata/schema/ghc96/vscode-extension-schema.golden.json +++ b/test/testdata/schema/ghc96/vscode-extension-schema.golden.json @@ -485,9 +485,9 @@ "scope": "resource", "type": "string" }, - "haskell.plugin.semanticTokens.config.module": { + "haskell.plugin.semanticTokens.config.moduleToken": { "default": "namespace", - "description": "LSP semantic token type to use for module names", + "description": "LSP semantic token type to use for modules", "enum": [ "namespace", "type", diff --git a/test/testdata/schema/ghc98/default-config.golden.json b/test/testdata/schema/ghc98/default-config.golden.json index 2a57a82ac9..98fa648915 100644 --- a/test/testdata/schema/ghc98/default-config.golden.json +++ b/test/testdata/schema/ghc98/default-config.golden.json @@ -77,7 +77,7 @@ "codeActionsOn": true, "codeLensOn": true }, - "moduleName": { + "moduleToken": { "globalOn": true }, "ormolu": { @@ -115,7 +115,7 @@ "classToken": "class", "dataConstructorToken": "enumMember", "functionToken": "function", - "moduleName": 
"namespace", + "moduleToken": "namespace", "patternSynonymToken": "macro", "recordFieldToken": "property", "typeConstructorToken": "enum", diff --git a/test/testdata/schema/ghc98/vscode-extension-schema.golden.json b/test/testdata/schema/ghc98/vscode-extension-schema.golden.json index 81b3ddee7d..d79f94383b 100644 --- a/test/testdata/schema/ghc98/vscode-extension-schema.golden.json +++ b/test/testdata/schema/ghc98/vscode-extension-schema.golden.json @@ -467,9 +467,9 @@ "scope": "resource", "type": "string" }, - "haskell.plugin.semanticTokens.config.module": { + "haskell.plugin.semanticTokens.config.moduleToken": { "default": "namespace", - "description": "LSP semantic token type to use for module names", + "description": "LSP semantic token type to use for modules", "enum": [ "namespace", "type", From 72e8eee0f786cd92acb4a904391153bf4ca89e58 Mon Sep 17 00:00:00 2001 From: Patrick Date: Thu, 18 Jan 2024 06:33:11 +0800 Subject: [PATCH 21/74] fix test --- test/testdata/schema/ghc92/default-config.golden.json | 2 +- test/testdata/schema/ghc94/default-config.golden.json | 2 +- test/testdata/schema/ghc96/default-config.golden.json | 2 +- test/testdata/schema/ghc98/default-config.golden.json | 2 +- 4 files changed, 4 insertions(+), 4 deletions(-) diff --git a/test/testdata/schema/ghc92/default-config.golden.json b/test/testdata/schema/ghc92/default-config.golden.json index 0a0de12313..78ee03b5d2 100644 --- a/test/testdata/schema/ghc92/default-config.golden.json +++ b/test/testdata/schema/ghc92/default-config.golden.json @@ -84,7 +84,7 @@ "codeActionsOn": true, "codeLensOn": true }, - "moduleToken": { + "moduleName": { "globalOn": true }, "ormolu": { diff --git a/test/testdata/schema/ghc94/default-config.golden.json b/test/testdata/schema/ghc94/default-config.golden.json index 57111c2d2e..6bd1d4a642 100644 --- a/test/testdata/schema/ghc94/default-config.golden.json +++ b/test/testdata/schema/ghc94/default-config.golden.json @@ -84,7 +84,7 @@ "codeActionsOn": true, "codeLensOn": true }, - "moduleToken": { + "moduleName": { "globalOn": true }, "ormolu": { diff --git a/test/testdata/schema/ghc96/default-config.golden.json b/test/testdata/schema/ghc96/default-config.golden.json index 57111c2d2e..6bd1d4a642 100644 --- a/test/testdata/schema/ghc96/default-config.golden.json +++ b/test/testdata/schema/ghc96/default-config.golden.json @@ -84,7 +84,7 @@ "codeActionsOn": true, "codeLensOn": true }, - "moduleToken": { + "moduleName": { "globalOn": true }, "ormolu": { diff --git a/test/testdata/schema/ghc98/default-config.golden.json b/test/testdata/schema/ghc98/default-config.golden.json index 98fa648915..3a1db12be3 100644 --- a/test/testdata/schema/ghc98/default-config.golden.json +++ b/test/testdata/schema/ghc98/default-config.golden.json @@ -77,7 +77,7 @@ "codeActionsOn": true, "codeLensOn": true }, - "moduleToken": { + "moduleName": { "globalOn": true }, "ormolu": { From 2a8855fd8067f14cdef0ec644784865c77d98eae Mon Sep 17 00:00:00 2001 From: Patrick Date: Thu, 18 Jan 2024 06:36:30 +0800 Subject: [PATCH 22/74] fix test --- .../src/Ide/Plugin/SemanticTokens/Mappings.hs | 2 +- .../src/Ide/Plugin/SemanticTokens/Query.hs | 4 ++-- .../src/Ide/Plugin/SemanticTokens/SemanticConfig.hs | 2 +- .../src/Ide/Plugin/SemanticTokens/Types.hs | 6 +++--- plugins/hls-semantic-tokens-plugin/test/Main.hs | 2 +- .../hls-semantic-tokens-plugin/test/testdata/T1.expected | 4 ++-- .../test/testdata/TDatatypeImported.expected | 2 +- .../test/testdata/TQualifiedName.expected | 8 ++++---- 8 files changed, 15 insertions(+), 15 
deletions(-) diff --git a/plugins/hls-semantic-tokens-plugin/src/Ide/Plugin/SemanticTokens/Mappings.hs b/plugins/hls-semantic-tokens-plugin/src/Ide/Plugin/SemanticTokens/Mappings.hs index 5fb0f46161..c2ade3ce3c 100644 --- a/plugins/hls-semantic-tokens-plugin/src/Ide/Plugin/SemanticTokens/Mappings.hs +++ b/plugins/hls-semantic-tokens-plugin/src/Ide/Plugin/SemanticTokens/Mappings.hs @@ -45,7 +45,7 @@ toLspTokenType conf tk = case tk of TTypeFamily -> stTypeFamily conf TRecordField -> stRecordField conf TPatternSynonym -> stPatternSynonym conf - TModuleName -> stModuleName conf + TModule -> stModule conf lspTokenReverseMap :: SemanticTokensConfig -> Map.Map SemanticTokenTypes HsSemanticTokenType lspTokenReverseMap config diff --git a/plugins/hls-semantic-tokens-plugin/src/Ide/Plugin/SemanticTokens/Query.hs b/plugins/hls-semantic-tokens-plugin/src/Ide/Plugin/SemanticTokens/Query.hs index c29f3d3839..a082367c49 100644 --- a/plugins/hls-semantic-tokens-plugin/src/Ide/Plugin/SemanticTokens/Query.hs +++ b/plugins/hls-semantic-tokens-plugin/src/Ide/Plugin/SemanticTokens/Query.hs @@ -23,7 +23,7 @@ import Development.IDE.GHC.Compat import Development.IDE.GHC.Error (realSrcSpanToCodePointRange) import Ide.Plugin.SemanticTokens.Mappings import Ide.Plugin.SemanticTokens.Types (HieFunMaskKind, - HsSemanticTokenType (TModuleName), + HsSemanticTokenType (TModule), NameSemanticMap, SemanticTokensConfig) import Ide.Plugin.SemanticTokens.Utils (splitAndBreakModuleNameAndOccName) @@ -46,7 +46,7 @@ mkLocalNameSemanticFromAst :: [Identifier] -> HieFunMaskKind a -> RefMap a -> Na mkLocalNameSemanticFromAst names hieKind rm = M.fromList (mapMaybe (nameNameSemanticFromHie hieKind rm) names) nameNameSemanticFromHie :: forall a. HieFunMaskKind a -> RefMap a -> Identifier -> Maybe (Identifier, HsSemanticTokenType) -nameNameSemanticFromHie _ _ ns@(Left _) = Just (ns, TModuleName) +nameNameSemanticFromHie _ _ ns@(Left _) = Just (ns, TModule) nameNameSemanticFromHie hieKind rm ns@(Right _) = do st <- nameSemanticFromRefMap rm ns return (ns, st) diff --git a/plugins/hls-semantic-tokens-plugin/src/Ide/Plugin/SemanticTokens/SemanticConfig.hs b/plugins/hls-semantic-tokens-plugin/src/Ide/Plugin/SemanticTokens/SemanticConfig.hs index a15178befb..3978a3b701 100644 --- a/plugins/hls-semantic-tokens-plugin/src/Ide/Plugin/SemanticTokens/SemanticConfig.hs +++ b/plugins/hls-semantic-tokens-plugin/src/Ide/Plugin/SemanticTokens/SemanticConfig.hs @@ -34,7 +34,7 @@ docName tt = case tt of TTypeSynonym -> "type synonyms" TTypeFamily -> "type families" TRecordField -> "record fields" - TModuleName -> "modules" + TModule -> "modules" toConfigName :: String -> String toConfigName = ("st" <>) diff --git a/plugins/hls-semantic-tokens-plugin/src/Ide/Plugin/SemanticTokens/Types.hs b/plugins/hls-semantic-tokens-plugin/src/Ide/Plugin/SemanticTokens/Types.hs index 47b0482973..cc253e6db4 100644 --- a/plugins/hls-semantic-tokens-plugin/src/Ide/Plugin/SemanticTokens/Types.hs +++ b/plugins/hls-semantic-tokens-plugin/src/Ide/Plugin/SemanticTokens/Types.hs @@ -44,7 +44,7 @@ data HsSemanticTokenType | TTypeSynonym -- Type synonym | TTypeFamily -- type family | TRecordField -- from match bind - | TModuleName -- module name + | TModule -- module name deriving (Eq, Ord, Show, Enum, Bounded, Generic, Lift) @@ -67,7 +67,7 @@ instance Default SemanticTokensConfig where , stTypeSynonym = SemanticTokenTypes_Type , stTypeFamily = SemanticTokenTypes_Interface , stRecordField = SemanticTokenTypes_Property - , stModuleName = SemanticTokenTypes_Namespace + , stModule 
= SemanticTokenTypes_Namespace } -- | SemanticTokensConfig_ is a configuration for the semantic tokens plugin. -- it contains map between the hs semantic token type and default token type. @@ -83,7 +83,7 @@ data SemanticTokensConfig = STC , stTypeSynonym :: !SemanticTokenTypes , stTypeFamily :: !SemanticTokenTypes , stRecordField :: !SemanticTokenTypes - , stModuleName :: !SemanticTokenTypes + , stModule :: !SemanticTokenTypes } deriving (Generic, Show) diff --git a/plugins/hls-semantic-tokens-plugin/test/Main.hs b/plugins/hls-semantic-tokens-plugin/test/Main.hs index 5cf15cc933..05dd19140b 100644 --- a/plugins/hls-semantic-tokens-plugin/test/Main.hs +++ b/plugins/hls-semantic-tokens-plugin/test/Main.hs @@ -168,7 +168,7 @@ semanticTokensTests = let file2 = "TModuleB.hs" let expect = [ - SemanticTokenOriginal TModuleName (Loc 3 8 8) "TModuleA", + SemanticTokenOriginal TModule (Loc 3 8 8) "TModuleA", SemanticTokenOriginal TVariable (Loc 5 1 2) "go", SemanticTokenOriginal TDataConstructor (Loc 5 6 4) "Game" ] diff --git a/plugins/hls-semantic-tokens-plugin/test/testdata/T1.expected b/plugins/hls-semantic-tokens-plugin/test/testdata/T1.expected index 408ed18812..5377bb2728 100644 --- a/plugins/hls-semantic-tokens-plugin/test/testdata/T1.expected +++ b/plugins/hls-semantic-tokens-plugin/test/testdata/T1.expected @@ -22,7 +22,7 @@ 21:7-10 TPatternSynonym "One" 23:6-9 TTypeConstructor "Doo" 23:12-15 TDataConstructor "Doo" -23:16-24 TModuleName "Prelude." +23:16-24 TModule "Prelude." 23:24-27 TTypeConstructor "Int" 24:6-10 TTypeSynonym "Bar1" 24:13-16 TTypeConstructor "Int" @@ -73,7 +73,7 @@ 41:1-3 TFunction "go" 41:6-9 TRecordField "foo" 42:1-4 TFunction "add" -42:8-16 TModuleName "Prelude." +42:8-16 TModule "Prelude." 42:16-17 TClassMethod "+" 47:1-5 TVariable "main" 47:9-11 TTypeConstructor "IO" diff --git a/plugins/hls-semantic-tokens-plugin/test/testdata/TDatatypeImported.expected b/plugins/hls-semantic-tokens-plugin/test/testdata/TDatatypeImported.expected index a59308400d..2c2cd492a0 100644 --- a/plugins/hls-semantic-tokens-plugin/test/testdata/TDatatypeImported.expected +++ b/plugins/hls-semantic-tokens-plugin/test/testdata/TDatatypeImported.expected @@ -1,4 +1,4 @@ -3:8-17 TModuleName "System.IO" +3:8-17 TModule "System.IO" 5:1-3 TVariable "go" 5:7-9 TTypeConstructor "IO" 6:1-3 TVariable "go" diff --git a/plugins/hls-semantic-tokens-plugin/test/testdata/TQualifiedName.expected b/plugins/hls-semantic-tokens-plugin/test/testdata/TQualifiedName.expected index 771756f1eb..cdbe36bc46 100644 --- a/plugins/hls-semantic-tokens-plugin/test/testdata/TQualifiedName.expected +++ b/plugins/hls-semantic-tokens-plugin/test/testdata/TQualifiedName.expected @@ -1,12 +1,12 @@ -3:18-27 TModuleName "Data.List" +3:18-27 TModule "Data.List" 6:1-2 TVariable "a" -6:5-13 TModuleName "Prelude." +6:5-13 TModule "Prelude." 6:13-22 TVariable "undefined" 7:1-2 TVariable "b" -7:8-18 TModuleName "Data.List." +7:8-18 TModule "Data.List." 7:18-22 TClassMethod "elem" 8:1-2 TVariable "c" -8:6-14 TModuleName "Prelude." +8:6-14 TModule "Prelude." 
8:14-15 TClassMethod "+" 9:1-2 TVariable "d" 9:6-7 TClassMethod "+" From 511e6700b5cb8223726881fd5c724a12de86b7e5 Mon Sep 17 00:00:00 2001 From: Patrick Date: Thu, 18 Jan 2024 07:25:38 +0800 Subject: [PATCH 23/74] stylish --- .../src/Ide/Plugin/SemanticTokens/Mappings.hs | 2 +- .../src/Ide/Plugin/SemanticTokens/Query.hs | 4 ++-- .../src/Ide/Plugin/SemanticTokens/SemanticConfig.hs | 2 +- .../src/Ide/Plugin/SemanticTokens/Types.hs | 2 +- 4 files changed, 5 insertions(+), 5 deletions(-) diff --git a/plugins/hls-semantic-tokens-plugin/src/Ide/Plugin/SemanticTokens/Mappings.hs b/plugins/hls-semantic-tokens-plugin/src/Ide/Plugin/SemanticTokens/Mappings.hs index c2ade3ce3c..27db5a0894 100644 --- a/plugins/hls-semantic-tokens-plugin/src/Ide/Plugin/SemanticTokens/Mappings.hs +++ b/plugins/hls-semantic-tokens-plugin/src/Ide/Plugin/SemanticTokens/Mappings.hs @@ -45,7 +45,7 @@ toLspTokenType conf tk = case tk of TTypeFamily -> stTypeFamily conf TRecordField -> stRecordField conf TPatternSynonym -> stPatternSynonym conf - TModule -> stModule conf + TModule -> stModule conf lspTokenReverseMap :: SemanticTokensConfig -> Map.Map SemanticTokenTypes HsSemanticTokenType lspTokenReverseMap config diff --git a/plugins/hls-semantic-tokens-plugin/src/Ide/Plugin/SemanticTokens/Query.hs b/plugins/hls-semantic-tokens-plugin/src/Ide/Plugin/SemanticTokens/Query.hs index a082367c49..bbe02dbb8d 100644 --- a/plugins/hls-semantic-tokens-plugin/src/Ide/Plugin/SemanticTokens/Query.hs +++ b/plugins/hls-semantic-tokens-plugin/src/Ide/Plugin/SemanticTokens/Query.hs @@ -95,10 +95,10 @@ hieAstSpanIdentifiers vf ast = inclusion a b = not $ exclusion a b exclusion :: Identifier -> IdentifierDetails a -> Bool exclusion idt IdentifierDetails {identInfo = infos} = case idt of - Left _moduleName -> False + Left _moduleName -> False -- filter out the evidence names since they are visible, -- derived names are not filtered out since they are not visible at use site. 
- Right _name -> any isEvidenceContext (S.toList infos) + Right _name -> any isEvidenceContext (S.toList infos) ------------------------------------------------- diff --git a/plugins/hls-semantic-tokens-plugin/src/Ide/Plugin/SemanticTokens/SemanticConfig.hs b/plugins/hls-semantic-tokens-plugin/src/Ide/Plugin/SemanticTokens/SemanticConfig.hs index 3978a3b701..5d7bed662b 100644 --- a/plugins/hls-semantic-tokens-plugin/src/Ide/Plugin/SemanticTokens/SemanticConfig.hs +++ b/plugins/hls-semantic-tokens-plugin/src/Ide/Plugin/SemanticTokens/SemanticConfig.hs @@ -34,7 +34,7 @@ docName tt = case tt of TTypeSynonym -> "type synonyms" TTypeFamily -> "type families" TRecordField -> "record fields" - TModule -> "modules" + TModule -> "modules" toConfigName :: String -> String toConfigName = ("st" <>) diff --git a/plugins/hls-semantic-tokens-plugin/src/Ide/Plugin/SemanticTokens/Types.hs b/plugins/hls-semantic-tokens-plugin/src/Ide/Plugin/SemanticTokens/Types.hs index cc253e6db4..f1e8807de8 100644 --- a/plugins/hls-semantic-tokens-plugin/src/Ide/Plugin/SemanticTokens/Types.hs +++ b/plugins/hls-semantic-tokens-plugin/src/Ide/Plugin/SemanticTokens/Types.hs @@ -83,7 +83,7 @@ data SemanticTokensConfig = STC , stTypeSynonym :: !SemanticTokenTypes , stTypeFamily :: !SemanticTokenTypes , stRecordField :: !SemanticTokenTypes - , stModule :: !SemanticTokenTypes + , stModule :: !SemanticTokenTypes } deriving (Generic, Show) From 9072e7b16f560c2ef8c6ec43e78e394bdd2dce0b Mon Sep 17 00:00:00 2001 From: Patrick Date: Mon, 22 Jan 2024 20:57:11 +0800 Subject: [PATCH 24/74] add tokenize --- .../hls-semantic-tokens-plugin.cabal | 1 + .../src/Ide/Plugin/SemanticTokens/Internal.hs | 1 + .../src/Ide/Plugin/SemanticTokens/Query.hs | 41 +-- .../src/Ide/Plugin/SemanticTokens/Tokenize.hs | 264 ++++++++++++++++++ .../src/Ide/Plugin/SemanticTokens/Utils.hs | 10 +- .../hls-semantic-tokens-plugin/test/Main.hs | 30 +- .../testdata/TModula\360\220\220\200bA.hs" | 5 + .../test/testdata/TModuleA.hs | 3 - .../test/testdata/TModuleB.hs | 5 +- .../testdata/TRecordDuplicateRecordFields.hs | 2 +- 10 files changed, 303 insertions(+), 59 deletions(-) create mode 100644 plugins/hls-semantic-tokens-plugin/src/Ide/Plugin/SemanticTokens/Tokenize.hs create mode 100644 "plugins/hls-semantic-tokens-plugin/test/testdata/TModula\360\220\220\200bA.hs" delete mode 100644 plugins/hls-semantic-tokens-plugin/test/testdata/TModuleA.hs diff --git a/plugins/hls-semantic-tokens-plugin/hls-semantic-tokens-plugin.cabal b/plugins/hls-semantic-tokens-plugin/hls-semantic-tokens-plugin.cabal index ee52848479..39486d4ff8 100644 --- a/plugins/hls-semantic-tokens-plugin/hls-semantic-tokens-plugin.cabal +++ b/plugins/hls-semantic-tokens-plugin/hls-semantic-tokens-plugin.cabal @@ -30,6 +30,7 @@ library Ide.Plugin.SemanticTokens.Query Ide.Plugin.SemanticTokens.SemanticConfig Ide.Plugin.SemanticTokens.Utils + Ide.Plugin.SemanticTokens.Tokenize Ide.Plugin.SemanticTokens.Internal hs-source-dirs: src diff --git a/plugins/hls-semantic-tokens-plugin/src/Ide/Plugin/SemanticTokens/Internal.hs b/plugins/hls-semantic-tokens-plugin/src/Ide/Plugin/SemanticTokens/Internal.hs index 7ff8f0c95f..409bd9a160 100644 --- a/plugins/hls-semantic-tokens-plugin/src/Ide/Plugin/SemanticTokens/Internal.hs +++ b/plugins/hls-semantic-tokens-plugin/src/Ide/Plugin/SemanticTokens/Internal.hs @@ -66,6 +66,7 @@ import Language.LSP.Protocol.Types (NormalizedFilePath, SemanticTokens, type (|?) 
(InL)) import Prelude hiding (span) +import Ide.Plugin.SemanticTokens.Tokenize (hieAstSpanIdentifiers) $mkSemanticConfigFunctions diff --git a/plugins/hls-semantic-tokens-plugin/src/Ide/Plugin/SemanticTokens/Query.hs b/plugins/hls-semantic-tokens-plugin/src/Ide/Plugin/SemanticTokens/Query.hs index bbe02dbb8d..e1919c3dd5 100644 --- a/plugins/hls-semantic-tokens-plugin/src/Ide/Plugin/SemanticTokens/Query.hs +++ b/plugins/hls-semantic-tokens-plugin/src/Ide/Plugin/SemanticTokens/Query.hs @@ -26,7 +26,7 @@ import Ide.Plugin.SemanticTokens.Types (HieFunMaskKind, HsSemanticTokenType (TModule), NameSemanticMap, SemanticTokensConfig) -import Ide.Plugin.SemanticTokens.Utils (splitAndBreakModuleNameAndOccName) +import Ide.Plugin.SemanticTokens.Utils (splitAndBreakModuleNameAndOccName, getTextByCodePointRangeFromVfs) import Language.LSP.Protocol.Types (Position (Position), Range (Range), SemanticTokenAbsolute (SemanticTokenAbsolute), @@ -61,45 +61,6 @@ nameNameSemanticFromHie hieKind rm ns@(Right _) = do contextInfosMaybeTokenType :: Set.Set ContextInfo -> Maybe HsSemanticTokenType contextInfosMaybeTokenType details = foldMap infoTokenType (Set.toList details) ------------------------------------ - --- * extract location from HieAST a - ------------------------------------ - --- | get only visible names from HieAST --- we care only the leaf node of the AST --- and filter out the derived and evidence names -hieAstSpanIdentifiers :: VirtualFile -> HieAST a -> M.Map Range (Set Identifier) -hieAstSpanIdentifiers vf ast = - if null (nodeChildren ast) - then getIds ast - else M.unionsWith S.union $ map (hieAstSpanIdentifiers vf) (nodeChildren ast) - where - getIds ast' = fromMaybe mempty $ do - range <- codePointRangeToRange vf $ realSrcSpanToCodePointRange $ nodeSpan ast' - return $ - M.fromListWith - (<>) - [S.singleton <$> ri | idt <- S.toList (getNodeIds' ast'), ri <- splitAndBreakModuleNameAndOccName vf range idt] - getNodeIds' = - Map.foldl' combineNodeIds mempty - . Map.filterWithKey (\k _ -> k == SourceInfo) - . getSourcedNodeInfo - . sourcedNodeInfo - combineNodeIds :: Set Identifier -> NodeInfo a -> Set Identifier - ad `combineNodeIds` (NodeInfo _ _ bd) = ad `S.union` xs - where - xs = S.fromList $ M.keys $ M.filterWithKey inclusion bd - inclusion :: Identifier -> IdentifierDetails a -> Bool - inclusion a b = not $ exclusion a b - exclusion :: Identifier -> IdentifierDetails a -> Bool - exclusion idt IdentifierDetails {identInfo = infos} = case idt of - Left _moduleName -> False - -- filter out the evidence names since they are visible, - -- derived names are not filtered out since they are not visible at use site. 
- Right _name -> any isEvidenceContext (S.toList infos) - ------------------------------------------------- diff --git a/plugins/hls-semantic-tokens-plugin/src/Ide/Plugin/SemanticTokens/Tokenize.hs b/plugins/hls-semantic-tokens-plugin/src/Ide/Plugin/SemanticTokens/Tokenize.hs new file mode 100644 index 0000000000..5a774706b3 --- /dev/null +++ b/plugins/hls-semantic-tokens-plugin/src/Ide/Plugin/SemanticTokens/Tokenize.hs @@ -0,0 +1,264 @@ +{-# LANGUAGE OverloadedRecordDot #-} +{-# LANGUAGE OverloadedStrings #-} +{-# LANGUAGE DeriveFunctor #-} +{-# LANGUAGE RankNTypes #-} + +module Ide.Plugin.SemanticTokens.Tokenize (hieAstSpanIdentifiers) where + +import Data.Foldable (Foldable (foldl')) +import qualified Data.Map as M +import Control.Monad (guard) +import qualified Data.Map as Map +import Data.Maybe (fromMaybe) +import Data.Set (Set) +import qualified Data.Set as S +import qualified Data.Text as T +import Data.Text (Text) +import Development.IDE.GHC.Compat +import Development.IDE.GHC.Error (realSrcSpanToCodePointRange) +import Language.LSP.Protocol.Types (Position (Position), + Range (Range), UInt) +import Language.LSP.VFS hiding (line) +import Prelude hiding (length, span) +import Data.Text.Utf16.Rope.Mixed (Rope) +import qualified Data.Text.Utf16.Rope.Mixed as Rope +import qualified Data.Text.Rope as Char +import Data.Text.Utf16.Rope (toText) +import Control.Monad.State (State, modify, MonadState (get), gets, MonadTrans (lift), runStateT, put) +import Control.Monad.Trans.State (StateT) +import Control.Monad.State (evalStateT) +import Control.Monad.State (execStateT) +import Control.Monad (forM_) +import Control.Monad.State (execState) +import Debug.Trace (traceShow, traceShowM, trace) +import Ide.Plugin.SemanticTokens.Utils (showIdentifier, rangeShortStr) +import qualified Data.Text.Utf16.Rope as Utf16 +import Development.IDE (pretty) + + + +type RangeIdSetMap = Map.Map Range (Set Identifier) +type TokenState = (RangeIdSetMap, Rope, Char.Position) +data PTokenState t = PTokenState { + rangeIdSetMap :: RangeIdSetMap + , rope :: Rope + , cursor :: Char.Position + , currentAst :: HieAST t + , columnsInUtf16 :: UInt + , currentRange :: Range + , currentRangeContext :: RangeSplitContext + } +data RangeSplitContext = RangeSplitContext { + fullText :: Text, + fullRange :: Range, + splitResult :: SplitResult +} deriving (Show) +emptyRangeSplitContext :: RangeSplitContext +emptyRangeSplitContext = RangeSplitContext "" (Range (Position 0 0) (Position 0 0)) (NoSplit ("", Range (Position 0 0) (Position 0 0))) + + +data SplitResult = + NoSplit (Text, Range) | + -- token text, prefix range(module range), token range + Split (Text, Range, Range) deriving (Show) + +startRange :: Range +startRange = Range (Position 0 0) (Position 0 0) + +mkPTokenState :: VirtualFile -> HieAST a -> PTokenState a +mkPTokenState vf ast = PTokenState mempty (Rope.fromText $ toText vf._file_text) (Char.Position 0 0) ast 0 startRange emptyRangeSplitContext + +type Parser m a = forall t . 
StateT (PTokenState t) m a + +updateCursor :: Monad m => Char.Position -> Parser m () +updateCursor pos = modify $ \s -> s {cursor = pos} +updateRope :: Monad m => Rope -> Parser m () +updateRope r = modify $ \s -> s {rope = r} +insertRangeIdSetMap :: Monad m => Range -> Set Identifier -> Parser m () +insertRangeIdSetMap r si = modify $ \s -> s {rangeIdSetMap = Map.insertWith (<>) r si $ rangeIdSetMap s} +addRangeIdSetMap :: Monad m => Range -> Identifier -> Parser m () +addRangeIdSetMap r i = insertRangeIdSetMap r $ S.singleton i +updateColumnsInUtf16 :: Monad m => UInt -> Parser m () +updateColumnsInUtf16 n = modify $ \s -> s {columnsInUtf16 = n} + +maybeM:: Monad m => Parser Maybe () -> Parser m () +maybeM p = do + st <- get + forM_ (execStateT p st) put + +foldAst :: Monad m => Parser m () +foldAst = do + ast <- gets currentAst + if null (nodeChildren ast) + then maybeM visitLeafIds + else do + let children = nodeChildren ast + mapM_ (\x -> (modify $ \s -> s {currentAst = x}) >> foldAst) children + +foldAndGetRangeIdSetMap :: VirtualFile -> HieAST a -> RangeIdSetMap +foldAndGetRangeIdSetMap vf ast = rangeIdSetMap $ execState foldAst (mkPTokenState vf ast) + +codePointRangeToRangeWith :: UInt -> UInt -> CodePointRange -> Range +codePointRangeToRangeWith newStartCol newEndCol (CodePointRange (CodePointPosition startLine _) (CodePointPosition endLine _)) = + Range (Position startLine newStartCol) (Position endLine newEndCol) + +-- >>> T.breakOnEnd "::;" "a::b::c" +-- ("","a::b::c") + +newColumn :: UInt -> Text -> UInt +newColumn n rp = case T.breakOnEnd "\n" rp of + ("", nEnd) -> n + fromIntegral (Rope.utf16Length $ Rope.fromText nEnd) + (_, nEnd) -> fromIntegral (Rope.utf16Length $ Rope.fromText nEnd) + + +visitLeafIds :: Parser Maybe () +visitLeafIds = maybeM $ do + leaf <- gets currentAst + pos <- gets cursor + rp <- gets rope + let span = nodeSpan leaf + (gap, token, remains) <- lift $ splitTokenAt pos rp span + cs <- gets columnsInUtf16 + let ncs = newColumn cs gap + let nce = newColumn ncs token + let ran = codePointRangeToRangeWith ncs nce $ realSrcSpanToCodePointRange span + -- ran <- lift $ codePointRangeToRange vf $ realSrcSpanToCodePointRange span + -- _ <- lift $ traceShowM (cs, ncs, nce, rangeShortStr ran) + -- set the new column to nce + updateColumnsInUtf16 nce + ranges <- lift $ splitRangeByText token ran + modify $ \s -> s {currentRange = ran, currentRangeContext = ranges} + mapM_ combineNodeIds $ Map.filterWithKey (\k _ -> k == SourceInfo) $ getSourcedNodeInfo $ sourcedNodeInfo leaf + updateCursor $ srcSpanEndCharPosition span + updateRope remains + where + combineNodeIds :: Monad m => NodeInfo a -> Parser m () + combineNodeIds (NodeInfo _ _ bd) = mapM_ getIdentifier (M.keys bd) + getIdentifier :: Monad m => Identifier -> Parser m () + getIdentifier idt = maybeM $ do + ran <- gets currentRange + case idt of + Left _moduleName -> addRangeIdSetMap ran idt + Right name -> do + ranCtx <- gets currentRangeContext + occStr <- lift $ case nameString name of + -- the generated selector name with {-# LANGUAGE DuplicateRecordFields #-} + '$':'s':'e':'l':':':xs -> Just $ takeWhile (/= ':') xs + ['$'] -> Just "$" + -- other generated names that should not be visible + '$':_ -> Nothing + ns -> Just ns + case splitResult ranCtx of + (NoSplit (tk, r)) -> do + guard $ T.unpack tk == occStr + addRangeIdSetMap r idt + (Split (tk, r1, r2)) -> do + guard $ T.unpack tk == occStr + addRangeIdSetMap r1 (Left $ mkModuleName "") + addRangeIdSetMap r2 idt + + + +splitTokenAt :: Char.Position -> 
Rope -> Span -> Maybe (Text, Text, Rope) +splitTokenAt pos rp span = do + (startPos, length) <- srcSpanMaybePositionLength span + let s = startPos `sub` pos + -- discard the gap between the end of the last identifier and the start of the current identifier + let (_gap, startRope) = Rope.charSplitAtPosition s rp + (token, remains) <- charSplitAtMaybe length startRope + return (Rope.toText _gap, Rope.toText token, remains) + +charSplitAtMaybe :: Word -> Rope -> Maybe (Rope, Rope) +charSplitAtMaybe len rp = do + let (prefix, suffix) = Rope.charSplitAt len rp + guard $ Rope.charLength prefix == len + return (prefix, suffix) + +srcSpanMaybePositionLength :: (Integral l) => RealSrcSpan -> Maybe (Char.Position, l) +srcSpanMaybePositionLength real = return (realSrcLocRopePosition $ realSrcSpanStart real, + fromIntegral $ (srcLocCol $ realSrcSpanEnd real) - (srcLocCol $ realSrcSpanStart real)) +realSrcLocRopePosition :: RealSrcLoc -> Char.Position +realSrcLocRopePosition real = Char.Position (fromIntegral $ srcLocLine real - 1) (fromIntegral $ srcLocCol real - 1) + +sub :: Char.Position -> Char.Position -> Char.Position +sub (Char.Position l1 c1) (Char.Position l2 c2) = + if l1 == l2 then Char.Position 0 (c1 - c2) else Char.Position (l1 - l2) c1 + +srcSpanEndCharPosition :: RealSrcSpan -> Char.Position +srcSpanEndCharPosition real = realSrcLocRopePosition $ realSrcSpanEnd real + +hieAstSpanIdentifiers :: VirtualFile -> HieAST a -> RangeIdSetMap +hieAstSpanIdentifiers = + traceShow ("hello") foldAndGetRangeIdSetMap +-- hieAstSpanIdentifiers :: VirtualFile -> HieAST a -> RangeIdSetMap +-- hieAstSpanIdentifiers vf ast = case go (mempty, Rope.fromText $ toText vf._file_text, Char.Position 0 0) ast of +-- (m, _, _) -> m +-- where +-- go :: TokenState -> HieAST a -> TokenState +-- go ts astTree = +-- if null (nodeChildren astTree) +-- then getId ts astTree +-- else foldl' go ts (nodeChildren astTree) +-- getId :: TokenState -> HieAST a -> TokenState +-- getId ts@(rm, rp, pos) leaf = fromMaybe ts $ do +-- let span = nodeSpan leaf +-- -- todo fix the range +-- ran <- codePointRangeToRange vf $ realSrcSpanToCodePointRange span +-- (_, token, remains) <- splitTokenAt pos rp span +-- -- todo fix module name +-- -- let idt = case splitRangeByText token ran of +-- -- Just (Left r) -> getNodeIds' (r, token) leaf +-- -- Just (Right (token', r1, r2)) -> M.insertWith (<>) r1 (S.singleton $ Left $ mkModuleName "") +-- -- $ getNodeIds' (r2, token') leaf +-- -- _ -> mempty +-- return (Map.unionWith (<>) rm (getNodeIds' (ran, Rope.toText token) leaf), remains, srcSpanEndCharPosition span) +-- where +-- getNodeIds' :: (Range, Text) -> HieAST a -> RangeIdSetMap +-- getNodeIds' rt = +-- Map.foldl' (combineNodeIds rt) mempty +-- . Map.filterWithKey (\k _ -> k == SourceInfo) +-- . getSourcedNodeInfo +-- . sourcedNodeInfo + +-- combineNodeIds :: (Range, Text) -> RangeIdSetMap -> NodeInfo a -> RangeIdSetMap +-- combineNodeIds rt ad (NodeInfo _ _ bd) = M.unionWith (<>) ad xs +-- where xs = M.unionsWith (<>) $ map (M.fromList . 
getIdentifier rt) $ M.keys bd + +-- getIdentifier :: (Range, Text) -> Identifier -> [(Range, Set Identifier)] +-- getIdentifier (ran, token) idt = case idt of +-- Left _moduleName -> [(ran, S.singleton $ Left _moduleName)] +-- Right name -> +-- let occStrMaybe = case nameString name of +-- -- the generated selector name with {-# LANGUAGE DuplicateRecordFields #-} +-- '$':'s':'e':'l':':':xs -> Just $ takeWhile (/= ':') xs +-- ['$'] -> Just "$" +-- -- other generated names that should not be visible +-- '$':_ -> Nothing +-- ns -> Just ns +-- in case occStrMaybe of +-- (Just occStr) | token == T.pack occStr -> [(ran, S.singleton $ Right name)] +-- _ -> [] + + +splitRangeByText :: Text -> Range -> Maybe RangeSplitContext +splitRangeByText tk ran = do + let (ran', tk') = case T.uncons tk of + Just ('(', xs) -> (subOneRange ran, T.takeWhile (/= ')') xs) + Just ('`', xs) -> (subOneRange ran, T.takeWhile (/= '`') xs) + _ -> (ran, tk) + let (prefix, tk'') = T.breakOnEnd "." tk' + spr <- splitRange tk'' (fromIntegral $ Rope.utf16Length $ Rope.fromText prefix) ran' + return $ RangeSplitContext tk ran spr + +splitRange :: Text -> UInt -> Range -> Maybe SplitResult +splitRange tx n ran@(Range (Position l1 c1) (Position l2 c2)) + | l1 == l2, n <= 0 = Just $ NoSplit (tx, ran) + | l1 == l2, n < fromIntegral (c2 - c1) = Just $ Split (tx, Range (Position l1 c1) (Position l1 (c1+n)), Range (Position l1 (c1+n)) (Position l1 c2)) + | otherwise = Nothing + +subOneRange :: Range -> Range +subOneRange (Range (Position l1 c1) (Position l2 c2)) = Range (Position l1 (c1 + 1)) (Position l2 (c2 - 1)) + +nameString :: Name -> String +nameString = occNameString . nameOccName + diff --git a/plugins/hls-semantic-tokens-plugin/src/Ide/Plugin/SemanticTokens/Utils.hs b/plugins/hls-semantic-tokens-plugin/src/Ide/Plugin/SemanticTokens/Utils.hs index 366e17330d..86f406bfdc 100644 --- a/plugins/hls-semantic-tokens-plugin/src/Ide/Plugin/SemanticTokens/Utils.hs +++ b/plugins/hls-semantic-tokens-plugin/src/Ide/Plugin/SemanticTokens/Utils.hs @@ -111,10 +111,10 @@ mkRange startLine startCol len = -- while Range might not be in code points unit. 
-- but the comparison is still valid since we only want to know if it is potentially a qualified identifier -- or an identifier that is wrapped in () or `` -splitAndBreakModuleNameAndOccName :: VirtualFile -> Range -> Identifier -> [(Range,Identifier)] +splitAndBreakModuleNameAndOccName :: Text -> Range -> Identifier -> [(Range,Identifier)] splitAndBreakModuleNameAndOccName _ ran (Left m) = [(ran, Left m)] -splitAndBreakModuleNameAndOccName vf ran@(Range (Position startLine startColumn) (Position _endLine endColumn)) (Right name) - | nameLength name < fromIntegral (endColumn - startColumn), (Just txt) <- getTextByCodePointRangeFromVfs vf ran = +splitAndBreakModuleNameAndOccName txt ran@(Range (Position startLine startColumn) (Position _endLine endColumn)) (Right name) + | nameLength name < fromIntegral (endColumn - startColumn) = let stripFlag = peekStripFlag txt in case peekPrefixModuleNameLength txt of Just prefixLen -> @@ -164,3 +164,7 @@ getTextByCodePointRangeFromVfs vf ra = do let line' = fromIntegral line let column' = fromIntegral column Rope.Position line' column' + +rangeShortStr :: Range -> String +rangeShortStr (Range (Position startLine startColumn) (Position endLine endColumn)) = + show startLine <> ":" <> show startColumn <> "-" <> show endLine <> ":" <> show endColumn diff --git a/plugins/hls-semantic-tokens-plugin/test/Main.hs b/plugins/hls-semantic-tokens-plugin/test/Main.hs index 05dd19140b..5692017692 100644 --- a/plugins/hls-semantic-tokens-plugin/test/Main.hs +++ b/plugins/hls-semantic-tokens-plugin/test/Main.hs @@ -164,7 +164,7 @@ semanticTokensTests = testGroup "other semantic Token test" [ testCase "module import test" $ do - let file1 = "TModuleA.hs" + let file1 = "TModula𐐀bA.hs" let file2 = "TModuleB.hs" let expect = [ @@ -175,24 +175,32 @@ semanticTokensTests = Test.Hls.runSessionWithServerInTmpDir def semanticTokensPlugin (mkFs $ FS.directProjectMulti [file1, file2]) $ do doc1 <- openDoc file1 "haskell" doc2 <- openDoc file2 "haskell" - _check1 <- waitForAction "TypeCheck" doc1 + check1 <- waitForAction "TypeCheck" doc1 check2 <- waitForAction "TypeCheck" doc2 + case check1 of + Right (WaitForIdeRuleResult _) -> return () + Left _ -> error "TypeCheck1 failed" case check2 of Right (WaitForIdeRuleResult _) -> return () Left _ -> error "TypeCheck2 failed" + + textContent2 <- documentContents doc2 let vfs = VirtualFile 0 0 (Rope.fromText textContent2) res2 <- Test.getSemanticTokens doc2 - case res2 ^? Language.LSP.Protocol.Types._L of - Just tokens -> do - either - (error . 
show) - (\xs -> liftIO $ xs @?= expect) - $ recoverSemanticTokens def vfs tokens - return () - _ -> error "No tokens found" - liftIO $ 1 @?= 1, + result <- docSemanticTokensString def doc2 + let expect = unlines [ + "3:8-18 TModule \"TModula\\66560bA\"" + , "4:18-28 TModule \"TModula\\66560bA\"" + , "6:1-3 TVariable \"go\"" + , "6:6-10 TDataConstructor \"Game\"" + , "8:1-5 TVariable \"a\\66560bb\"" + , "8:8-19 TModule \"TModula\\66560bA.\"" + , "8:19-22 TRecordField \"a\\66560b\"" + , "8:23-25 TVariable \"go\"" + ] + liftIO $ result @?= expect, goldenWithSemanticTokensWithDefaultConfig "mixed constancy test result generated from one ghc version" "T1", goldenWithSemanticTokensWithDefaultConfig "pattern bind" "TPatternSynonym", goldenWithSemanticTokensWithDefaultConfig "type family" "TTypefamily", diff --git "a/plugins/hls-semantic-tokens-plugin/test/testdata/TModula\360\220\220\200bA.hs" "b/plugins/hls-semantic-tokens-plugin/test/testdata/TModula\360\220\220\200bA.hs" new file mode 100644 index 0000000000..f111eb396b --- /dev/null +++ "b/plugins/hls-semantic-tokens-plugin/test/testdata/TModula\360\220\220\200bA.hs" @@ -0,0 +1,5 @@ +module TModula𐐀bA where + +data Game = Game {a𐐀b :: Int} + + diff --git a/plugins/hls-semantic-tokens-plugin/test/testdata/TModuleA.hs b/plugins/hls-semantic-tokens-plugin/test/testdata/TModuleA.hs deleted file mode 100644 index 7d2c2bb034..0000000000 --- a/plugins/hls-semantic-tokens-plugin/test/testdata/TModuleA.hs +++ /dev/null @@ -1,3 +0,0 @@ -module TModuleA where - -data Game = Game Int diff --git a/plugins/hls-semantic-tokens-plugin/test/testdata/TModuleB.hs b/plugins/hls-semantic-tokens-plugin/test/testdata/TModuleB.hs index 15ae4a7c44..f90f0484b0 100644 --- a/plugins/hls-semantic-tokens-plugin/test/testdata/TModuleB.hs +++ b/plugins/hls-semantic-tokens-plugin/test/testdata/TModuleB.hs @@ -1,5 +1,8 @@ module TModuleB where -import TModuleA +import TModula𐐀bA +import qualified TModula𐐀bA go = Game 1 + +a𐐀bb = TModula𐐀bA.a𐐀b go diff --git a/plugins/hls-semantic-tokens-plugin/test/testdata/TRecordDuplicateRecordFields.hs b/plugins/hls-semantic-tokens-plugin/test/testdata/TRecordDuplicateRecordFields.hs index 7258b5fc27..395a1d3731 100644 --- a/plugins/hls-semantic-tokens-plugin/test/testdata/TRecordDuplicateRecordFields.hs +++ b/plugins/hls-semantic-tokens-plugin/test/testdata/TRecordDuplicateRecordFields.hs @@ -2,4 +2,4 @@ module TRecordDuplicateRecordFields where -data Foo = Foo { boo :: !String } \ No newline at end of file +data Foo = Foo { boo :: !String } From 6f588ca2eefba6123731b98bb0a075e5664d5dce Mon Sep 17 00:00:00 2001 From: Patrick Date: Mon, 22 Jan 2024 21:01:10 +0800 Subject: [PATCH 25/74] clean up --- .../src/Ide/Plugin/SemanticTokens/Tokenize.hs | 341 ++++++++---------- 1 file changed, 145 insertions(+), 196 deletions(-) diff --git a/plugins/hls-semantic-tokens-plugin/src/Ide/Plugin/SemanticTokens/Tokenize.hs b/plugins/hls-semantic-tokens-plugin/src/Ide/Plugin/SemanticTokens/Tokenize.hs index 5a774706b3..e5e8104f07 100644 --- a/plugins/hls-semantic-tokens-plugin/src/Ide/Plugin/SemanticTokens/Tokenize.hs +++ b/plugins/hls-semantic-tokens-plugin/src/Ide/Plugin/SemanticTokens/Tokenize.hs @@ -1,264 +1,213 @@ +{-# LANGUAGE DeriveFunctor #-} {-# LANGUAGE OverloadedRecordDot #-} {-# LANGUAGE OverloadedStrings #-} -{-# LANGUAGE DeriveFunctor #-} {-# LANGUAGE RankNTypes #-} module Ide.Plugin.SemanticTokens.Tokenize (hieAstSpanIdentifiers) where -import Data.Foldable (Foldable (foldl')) -import qualified Data.Map as M -import Control.Monad (guard) 
-import qualified Data.Map as Map -import Data.Maybe (fromMaybe) -import Data.Set (Set) -import qualified Data.Set as S -import qualified Data.Text as T -import Data.Text (Text) -import Development.IDE.GHC.Compat -import Development.IDE.GHC.Error (realSrcSpanToCodePointRange) -import Language.LSP.Protocol.Types (Position (Position), - Range (Range), UInt) -import Language.LSP.VFS hiding (line) -import Prelude hiding (length, span) -import Data.Text.Utf16.Rope.Mixed (Rope) -import qualified Data.Text.Utf16.Rope.Mixed as Rope -import qualified Data.Text.Rope as Char -import Data.Text.Utf16.Rope (toText) -import Control.Monad.State (State, modify, MonadState (get), gets, MonadTrans (lift), runStateT, put) +import Control.Monad (forM_, guard) +import Control.Monad.State (MonadState (get), MonadTrans (lift), State, evalStateT, execState, execStateT, gets, modify, put, runStateT) import Control.Monad.Trans.State (StateT) -import Control.Monad.State (evalStateT) -import Control.Monad.State (execStateT) -import Control.Monad (forM_) -import Control.Monad.State (execState) -import Debug.Trace (traceShow, traceShowM, trace) -import Ide.Plugin.SemanticTokens.Utils (showIdentifier, rangeShortStr) +import Data.Foldable (Foldable (foldl')) +import qualified Data.Map as M +import qualified Data.Map as Map +import Data.Maybe (fromMaybe) +import Data.Set (Set) +import qualified Data.Set as S +import Data.Text (Text) +import qualified Data.Text as T +import qualified Data.Text.Rope as Char +import Data.Text.Utf16.Rope (toText) import qualified Data.Text.Utf16.Rope as Utf16 +import Data.Text.Utf16.Rope.Mixed (Rope) +import qualified Data.Text.Utf16.Rope.Mixed as Rope +import Debug.Trace (trace, traceShow, traceShowM) import Development.IDE (pretty) - - +import Development.IDE.GHC.Compat +import Development.IDE.GHC.Error (realSrcSpanToCodePointRange) +import Ide.Plugin.SemanticTokens.Utils (rangeShortStr, showIdentifier) +import Language.LSP.Protocol.Types + ( Position (Position), + Range (Range), + UInt, + ) +import Language.LSP.VFS hiding (line) +import Prelude hiding (length, span) type RangeIdSetMap = Map.Map Range (Set Identifier) -type TokenState = (RangeIdSetMap, Rope, Char.Position) -data PTokenState t = PTokenState { - rangeIdSetMap :: RangeIdSetMap - , rope :: Rope - , cursor :: Char.Position - , currentAst :: HieAST t - , columnsInUtf16 :: UInt - , currentRange :: Range - , currentRangeContext :: RangeSplitContext - } -data RangeSplitContext = RangeSplitContext { - fullText :: Text, - fullRange :: Range, - splitResult :: SplitResult -} deriving (Show) -emptyRangeSplitContext :: RangeSplitContext -emptyRangeSplitContext = RangeSplitContext "" (Range (Position 0 0) (Position 0 0)) (NoSplit ("", Range (Position 0 0) (Position 0 0))) +type TokenState = (RangeIdSetMap, Rope, Char.Position) -data SplitResult = - NoSplit (Text, Range) | - -- token text, prefix range(module range), token range - Split (Text, Range, Range) deriving (Show) +data PTokenState t = PTokenState + { rangeIdSetMap :: RangeIdSetMap, + rope :: Rope, + cursor :: Char.Position, + currentAst :: HieAST t, + columnsInUtf16 :: UInt, + currentRange :: Range, + currentRangeContext :: SplitResult + } + +data SplitResult + = NoSplit (Text, Range) + | -- token text, prefix range(module range), token range + Split (Text, Range, Range) + deriving (Show) startRange :: Range startRange = Range (Position 0 0) (Position 0 0) mkPTokenState :: VirtualFile -> HieAST a -> PTokenState a -mkPTokenState vf ast = PTokenState mempty (Rope.fromText $ toText 
vf._file_text) (Char.Position 0 0) ast 0 startRange emptyRangeSplitContext +mkPTokenState vf ast = + PTokenState + { rangeIdSetMap = mempty, + rope = Rope.fromText $ toText vf._file_text, + cursor = Char.Position 0 0, + currentAst = ast, + columnsInUtf16 = 0, + currentRange = startRange, + currentRangeContext = NoSplit ("", startRange) + } -type Parser m a = forall t . StateT (PTokenState t) m a +type Parser m a = forall t. StateT (PTokenState t) m a -updateCursor :: Monad m => Char.Position -> Parser m () +updateCursor :: (Monad m) => Char.Position -> Parser m () updateCursor pos = modify $ \s -> s {cursor = pos} -updateRope :: Monad m => Rope -> Parser m () + +updateRope :: (Monad m) => Rope -> Parser m () updateRope r = modify $ \s -> s {rope = r} -insertRangeIdSetMap :: Monad m => Range -> Set Identifier -> Parser m () + +insertRangeIdSetMap :: (Monad m) => Range -> Set Identifier -> Parser m () insertRangeIdSetMap r si = modify $ \s -> s {rangeIdSetMap = Map.insertWith (<>) r si $ rangeIdSetMap s} -addRangeIdSetMap :: Monad m => Range -> Identifier -> Parser m () + +addRangeIdSetMap :: (Monad m) => Range -> Identifier -> Parser m () addRangeIdSetMap r i = insertRangeIdSetMap r $ S.singleton i -updateColumnsInUtf16 :: Monad m => UInt -> Parser m () + +updateColumnsInUtf16 :: (Monad m) => UInt -> Parser m () updateColumnsInUtf16 n = modify $ \s -> s {columnsInUtf16 = n} -maybeM:: Monad m => Parser Maybe () -> Parser m () +maybeM :: (Monad m) => Parser Maybe () -> Parser m () maybeM p = do - st <- get - forM_ (execStateT p st) put + st <- get + forM_ (execStateT p st) put -foldAst :: Monad m => Parser m () +foldAst :: (Monad m) => Parser m () foldAst = do - ast <- gets currentAst - if null (nodeChildren ast) + ast <- gets currentAst + if null (nodeChildren ast) then maybeM visitLeafIds else do - let children = nodeChildren ast - mapM_ (\x -> (modify $ \s -> s {currentAst = x}) >> foldAst) children + let children = nodeChildren ast + mapM_ (\x -> (modify $ \s -> s {currentAst = x}) >> foldAst) children -foldAndGetRangeIdSetMap :: VirtualFile -> HieAST a -> RangeIdSetMap -foldAndGetRangeIdSetMap vf ast = rangeIdSetMap $ execState foldAst (mkPTokenState vf ast) +hieAstSpanIdentifiers :: VirtualFile -> HieAST a -> RangeIdSetMap +hieAstSpanIdentifiers vf ast = rangeIdSetMap $ execState foldAst (mkPTokenState vf ast) codePointRangeToRangeWith :: UInt -> UInt -> CodePointRange -> Range codePointRangeToRangeWith newStartCol newEndCol (CodePointRange (CodePointPosition startLine _) (CodePointPosition endLine _)) = - Range (Position startLine newStartCol) (Position endLine newEndCol) - --- >>> T.breakOnEnd "::;" "a::b::c" --- ("","a::b::c") + Range (Position startLine newStartCol) (Position endLine newEndCol) newColumn :: UInt -> Text -> UInt newColumn n rp = case T.breakOnEnd "\n" rp of - ("", nEnd) -> n + fromIntegral (Rope.utf16Length $ Rope.fromText nEnd) - (_, nEnd) -> fromIntegral (Rope.utf16Length $ Rope.fromText nEnd) - + ("", nEnd) -> n + fromIntegral (Rope.utf16Length $ Rope.fromText nEnd) + (_, nEnd) -> fromIntegral (Rope.utf16Length $ Rope.fromText nEnd) visitLeafIds :: Parser Maybe () visitLeafIds = maybeM $ do - leaf <- gets currentAst - pos <- gets cursor - rp <- gets rope - let span = nodeSpan leaf - (gap, token, remains) <- lift $ splitTokenAt pos rp span - cs <- gets columnsInUtf16 - let ncs = newColumn cs gap - let nce = newColumn ncs token - let ran = codePointRangeToRangeWith ncs nce $ realSrcSpanToCodePointRange span - -- ran <- lift $ codePointRangeToRange vf $ 
realSrcSpanToCodePointRange span - -- _ <- lift $ traceShowM (cs, ncs, nce, rangeShortStr ran) - -- set the new column to nce - updateColumnsInUtf16 nce - ranges <- lift $ splitRangeByText token ran - modify $ \s -> s {currentRange = ran, currentRangeContext = ranges} - mapM_ combineNodeIds $ Map.filterWithKey (\k _ -> k == SourceInfo) $ getSourcedNodeInfo $ sourcedNodeInfo leaf - updateCursor $ srcSpanEndCharPosition span - updateRope remains - where - combineNodeIds :: Monad m => NodeInfo a -> Parser m () - combineNodeIds (NodeInfo _ _ bd) = mapM_ getIdentifier (M.keys bd) - getIdentifier :: Monad m => Identifier -> Parser m () - getIdentifier idt = maybeM $ do - ran <- gets currentRange - case idt of - Left _moduleName -> addRangeIdSetMap ran idt - Right name -> do - ranCtx <- gets currentRangeContext - occStr <- lift $ case nameString name of - -- the generated selector name with {-# LANGUAGE DuplicateRecordFields #-} - '$':'s':'e':'l':':':xs -> Just $ takeWhile (/= ':') xs - ['$'] -> Just "$" - -- other generated names that should not be visible - '$':_ -> Nothing - ns -> Just ns - case splitResult ranCtx of - (NoSplit (tk, r)) -> do - guard $ T.unpack tk == occStr - addRangeIdSetMap r idt - (Split (tk, r1, r2)) -> do - guard $ T.unpack tk == occStr - addRangeIdSetMap r1 (Left $ mkModuleName "") - addRangeIdSetMap r2 idt - - + leaf <- gets currentAst + pos <- gets cursor + rp <- gets rope + let span = nodeSpan leaf + (gap, token, remains) <- lift $ splitTokenAt pos rp span + cs <- gets columnsInUtf16 + let ncs = newColumn cs gap + let nce = newColumn ncs token + let ran = codePointRangeToRangeWith ncs nce $ realSrcSpanToCodePointRange span + -- ran <- lift $ codePointRangeToRange vf $ realSrcSpanToCodePointRange span + -- _ <- lift $ traceShowM (cs, ncs, nce, rangeShortStr ran) + -- set the new column to nce + updateColumnsInUtf16 nce + ranges <- lift $ splitRangeByText token ran + modify $ \s -> s {currentRange = ran, currentRangeContext = ranges} + mapM_ combineNodeIds $ Map.filterWithKey (\k _ -> k == SourceInfo) $ getSourcedNodeInfo $ sourcedNodeInfo leaf + updateCursor $ srcSpanEndCharPosition span + updateRope remains + where + combineNodeIds :: (Monad m) => NodeInfo a -> Parser m () + combineNodeIds (NodeInfo _ _ bd) = mapM_ getIdentifier (M.keys bd) + getIdentifier :: (Monad m) => Identifier -> Parser m () + getIdentifier idt = maybeM $ do + ran <- gets currentRange + case idt of + Left _moduleName -> addRangeIdSetMap ran idt + Right name -> do + ranSplit <- gets currentRangeContext + occStr <- lift $ case nameString name of + -- the generated selector name with {-# LANGUAGE DuplicateRecordFields #-} + '$' : 's' : 'e' : 'l' : ':' : xs -> Just $ takeWhile (/= ':') xs + ['$'] -> Just "$" + -- other generated names that should not be visible + '$' : _ -> Nothing + ns -> Just ns + case ranSplit of + (NoSplit (tk, r)) -> do + guard $ T.unpack tk == occStr + addRangeIdSetMap r idt + (Split (tk, r1, r2)) -> do + guard $ T.unpack tk == occStr + addRangeIdSetMap r1 (Left $ mkModuleName "") + addRangeIdSetMap r2 idt splitTokenAt :: Char.Position -> Rope -> Span -> Maybe (Text, Text, Rope) splitTokenAt pos rp span = do - (startPos, length) <- srcSpanMaybePositionLength span - let s = startPos `sub` pos - -- discard the gap between the end of the last identifier and the start of the current identifier - let (_gap, startRope) = Rope.charSplitAtPosition s rp - (token, remains) <- charSplitAtMaybe length startRope - return (Rope.toText _gap, Rope.toText token, remains) + (startPos, length) 
<- srcSpanMaybePositionLength span + let s = startPos `sub` pos + -- discard the gap between the end of the last identifier and the start of the current identifier + let (_gap, startRope) = Rope.charSplitAtPosition s rp + (token, remains) <- charSplitAtMaybe length startRope + return (Rope.toText _gap, Rope.toText token, remains) charSplitAtMaybe :: Word -> Rope -> Maybe (Rope, Rope) charSplitAtMaybe len rp = do - let (prefix, suffix) = Rope.charSplitAt len rp - guard $ Rope.charLength prefix == len - return (prefix, suffix) + let (prefix, suffix) = Rope.charSplitAt len rp + guard $ Rope.charLength prefix == len + return (prefix, suffix) srcSpanMaybePositionLength :: (Integral l) => RealSrcSpan -> Maybe (Char.Position, l) -srcSpanMaybePositionLength real = return (realSrcLocRopePosition $ realSrcSpanStart real, - fromIntegral $ (srcLocCol $ realSrcSpanEnd real) - (srcLocCol $ realSrcSpanStart real)) +srcSpanMaybePositionLength real = + return + ( realSrcLocRopePosition $ realSrcSpanStart real, + fromIntegral $ (srcLocCol $ realSrcSpanEnd real) - (srcLocCol $ realSrcSpanStart real) + ) + realSrcLocRopePosition :: RealSrcLoc -> Char.Position realSrcLocRopePosition real = Char.Position (fromIntegral $ srcLocLine real - 1) (fromIntegral $ srcLocCol real - 1) sub :: Char.Position -> Char.Position -> Char.Position sub (Char.Position l1 c1) (Char.Position l2 c2) = - if l1 == l2 then Char.Position 0 (c1 - c2) else Char.Position (l1 - l2) c1 + if l1 == l2 then Char.Position 0 (c1 - c2) else Char.Position (l1 - l2) c1 srcSpanEndCharPosition :: RealSrcSpan -> Char.Position srcSpanEndCharPosition real = realSrcLocRopePosition $ realSrcSpanEnd real -hieAstSpanIdentifiers :: VirtualFile -> HieAST a -> RangeIdSetMap -hieAstSpanIdentifiers = - traceShow ("hello") foldAndGetRangeIdSetMap --- hieAstSpanIdentifiers :: VirtualFile -> HieAST a -> RangeIdSetMap --- hieAstSpanIdentifiers vf ast = case go (mempty, Rope.fromText $ toText vf._file_text, Char.Position 0 0) ast of --- (m, _, _) -> m --- where --- go :: TokenState -> HieAST a -> TokenState --- go ts astTree = --- if null (nodeChildren astTree) --- then getId ts astTree --- else foldl' go ts (nodeChildren astTree) --- getId :: TokenState -> HieAST a -> TokenState --- getId ts@(rm, rp, pos) leaf = fromMaybe ts $ do --- let span = nodeSpan leaf --- -- todo fix the range --- ran <- codePointRangeToRange vf $ realSrcSpanToCodePointRange span --- (_, token, remains) <- splitTokenAt pos rp span --- -- todo fix module name --- -- let idt = case splitRangeByText token ran of --- -- Just (Left r) -> getNodeIds' (r, token) leaf --- -- Just (Right (token', r1, r2)) -> M.insertWith (<>) r1 (S.singleton $ Left $ mkModuleName "") --- -- $ getNodeIds' (r2, token') leaf --- -- _ -> mempty --- return (Map.unionWith (<>) rm (getNodeIds' (ran, Rope.toText token) leaf), remains, srcSpanEndCharPosition span) --- where --- getNodeIds' :: (Range, Text) -> HieAST a -> RangeIdSetMap --- getNodeIds' rt = --- Map.foldl' (combineNodeIds rt) mempty --- . Map.filterWithKey (\k _ -> k == SourceInfo) --- . getSourcedNodeInfo --- . sourcedNodeInfo - --- combineNodeIds :: (Range, Text) -> RangeIdSetMap -> NodeInfo a -> RangeIdSetMap --- combineNodeIds rt ad (NodeInfo _ _ bd) = M.unionWith (<>) ad xs --- where xs = M.unionsWith (<>) $ map (M.fromList . 
getIdentifier rt) $ M.keys bd - --- getIdentifier :: (Range, Text) -> Identifier -> [(Range, Set Identifier)] --- getIdentifier (ran, token) idt = case idt of --- Left _moduleName -> [(ran, S.singleton $ Left _moduleName)] --- Right name -> --- let occStrMaybe = case nameString name of --- -- the generated selector name with {-# LANGUAGE DuplicateRecordFields #-} --- '$':'s':'e':'l':':':xs -> Just $ takeWhile (/= ':') xs --- ['$'] -> Just "$" --- -- other generated names that should not be visible --- '$':_ -> Nothing --- ns -> Just ns --- in case occStrMaybe of --- (Just occStr) | token == T.pack occStr -> [(ran, S.singleton $ Right name)] --- _ -> [] - - -splitRangeByText :: Text -> Range -> Maybe RangeSplitContext +splitRangeByText :: Text -> Range -> Maybe SplitResult splitRangeByText tk ran = do - let (ran', tk') = case T.uncons tk of - Just ('(', xs) -> (subOneRange ran, T.takeWhile (/= ')') xs) - Just ('`', xs) -> (subOneRange ran, T.takeWhile (/= '`') xs) - _ -> (ran, tk) - let (prefix, tk'') = T.breakOnEnd "." tk' - spr <- splitRange tk'' (fromIntegral $ Rope.utf16Length $ Rope.fromText prefix) ran' - return $ RangeSplitContext tk ran spr + let (ran', tk') = case T.uncons tk of + Just ('(', xs) -> (subOneRange ran, T.takeWhile (/= ')') xs) + Just ('`', xs) -> (subOneRange ran, T.takeWhile (/= '`') xs) + _ -> (ran, tk) + let (prefix, tk'') = T.breakOnEnd "." tk' + spr <- splitRange tk'' (fromIntegral $ Rope.utf16Length $ Rope.fromText prefix) ran' + return spr splitRange :: Text -> UInt -> Range -> Maybe SplitResult splitRange tx n ran@(Range (Position l1 c1) (Position l2 c2)) - | l1 == l2, n <= 0 = Just $ NoSplit (tx, ran) - | l1 == l2, n < fromIntegral (c2 - c1) = Just $ Split (tx, Range (Position l1 c1) (Position l1 (c1+n)), Range (Position l1 (c1+n)) (Position l1 c2)) - | otherwise = Nothing + | l1 == l2, n <= 0 = Just $ NoSplit (tx, ran) + | l1 == l2, n < fromIntegral (c2 - c1) = Just $ Split (tx, Range (Position l1 c1) (Position l1 (c1 + n)), Range (Position l1 (c1 + n)) (Position l1 c2)) + | otherwise = Nothing subOneRange :: Range -> Range subOneRange (Range (Position l1 c1) (Position l2 c2)) = Range (Position l1 (c1 + 1)) (Position l2 (c2 - 1)) nameString :: Name -> String nameString = occNameString . 
nameOccName - From 4236ca6310ed42f5495d763893cf7f3a46e6dd32 Mon Sep 17 00:00:00 2001 From: Patrick Date: Mon, 22 Jan 2024 21:36:32 +0800 Subject: [PATCH 26/74] clean up --- .../src/Ide/Plugin/SemanticTokens/Tokenize.hs | 202 +++++++++--------- .../src/Ide/Plugin/SemanticTokens/Utils.hs | 1 + 2 files changed, 99 insertions(+), 104 deletions(-) diff --git a/plugins/hls-semantic-tokens-plugin/src/Ide/Plugin/SemanticTokens/Tokenize.hs b/plugins/hls-semantic-tokens-plugin/src/Ide/Plugin/SemanticTokens/Tokenize.hs index e5e8104f07..566c49d6d1 100644 --- a/plugins/hls-semantic-tokens-plugin/src/Ide/Plugin/SemanticTokens/Tokenize.hs +++ b/plugins/hls-semantic-tokens-plugin/src/Ide/Plugin/SemanticTokens/Tokenize.hs @@ -5,27 +5,22 @@ module Ide.Plugin.SemanticTokens.Tokenize (hieAstSpanIdentifiers) where +import Control.Lens (Identity (runIdentity)) import Control.Monad (forM_, guard) -import Control.Monad.State (MonadState (get), MonadTrans (lift), State, evalStateT, execState, execStateT, gets, modify, put, runStateT) +import Control.Monad.State (MonadState (get), MonadTrans (lift), execState, execStateT, gets, modify, put, runStateT) import Control.Monad.Trans.State (StateT) -import Data.Foldable (Foldable (foldl')) import qualified Data.Map as M import qualified Data.Map as Map -import Data.Maybe (fromMaybe) import Data.Set (Set) import qualified Data.Set as S import Data.Text (Text) import qualified Data.Text as T import qualified Data.Text.Rope as Char import Data.Text.Utf16.Rope (toText) -import qualified Data.Text.Utf16.Rope as Utf16 import Data.Text.Utf16.Rope.Mixed (Rope) import qualified Data.Text.Utf16.Rope.Mixed as Rope -import Debug.Trace (trace, traceShow, traceShowM) -import Development.IDE (pretty) import Development.IDE.GHC.Compat import Development.IDE.GHC.Error (realSrcSpanToCodePointRange) -import Ide.Plugin.SemanticTokens.Utils (rangeShortStr, showIdentifier) import Language.LSP.Protocol.Types ( Position (Position), Range (Range), @@ -34,9 +29,9 @@ import Language.LSP.Protocol.Types import Language.LSP.VFS hiding (line) import Prelude hiding (length, span) -type RangeIdSetMap = Map.Map Range (Set Identifier) +type Tokenizer m a = forall t. StateT (PTokenState t) m a -type TokenState = (RangeIdSetMap, Rope, Char.Position) +type RangeIdSetMap = Map.Map Range (Set Identifier) data PTokenState t = PTokenState { rangeIdSetMap :: RangeIdSetMap, @@ -48,10 +43,12 @@ data PTokenState t = PTokenState currentRangeContext :: SplitResult } +runTokenizer :: (Monad m) => Tokenizer m a -> PTokenState t -> m RangeIdSetMap +runTokenizer p st = rangeIdSetMap <$> execStateT p st + data SplitResult - = NoSplit (Text, Range) - | -- token text, prefix range(module range), token range - Split (Text, Range, Range) + = NoSplit (Text, Range) -- does not need to split, token text, token range + | Split (Text, Range, Range) -- token text, prefix range(module range), token range deriving (Show) startRange :: Range @@ -69,80 +66,55 @@ mkPTokenState vf ast = currentRangeContext = NoSplit ("", startRange) } -type Parser m a = forall t. 
StateT (PTokenState t) m a - -updateCursor :: (Monad m) => Char.Position -> Parser m () +updateCursor :: (Monad m) => Char.Position -> Tokenizer m () updateCursor pos = modify $ \s -> s {cursor = pos} -updateRope :: (Monad m) => Rope -> Parser m () +updateRope :: (Monad m) => Rope -> Tokenizer m () updateRope r = modify $ \s -> s {rope = r} -insertRangeIdSetMap :: (Monad m) => Range -> Set Identifier -> Parser m () -insertRangeIdSetMap r si = modify $ \s -> s {rangeIdSetMap = Map.insertWith (<>) r si $ rangeIdSetMap s} +addRangeIdSetMap :: (Monad m) => Range -> Identifier -> Tokenizer m () +addRangeIdSetMap r i = modify $ \s -> s {rangeIdSetMap = Map.insertWith (<>) r (S.singleton i) $ rangeIdSetMap s} -addRangeIdSetMap :: (Monad m) => Range -> Identifier -> Parser m () -addRangeIdSetMap r i = insertRangeIdSetMap r $ S.singleton i - -updateColumnsInUtf16 :: (Monad m) => UInt -> Parser m () +updateColumnsInUtf16 :: (Monad m) => UInt -> Tokenizer m () updateColumnsInUtf16 n = modify $ \s -> s {columnsInUtf16 = n} -maybeM :: (Monad m) => Parser Maybe () -> Parser m () -maybeM p = do +liftMaybeM :: (Monad m) => Tokenizer Maybe () -> Tokenizer m () +liftMaybeM p = do st <- get forM_ (execStateT p st) put -foldAst :: (Monad m) => Parser m () +hieAstSpanIdentifiers :: VirtualFile -> HieAST a -> RangeIdSetMap +hieAstSpanIdentifiers vf ast = runIdentity $ runTokenizer foldAst (mkPTokenState vf ast) + +-- | foldAst +-- visit every leaf node in the ast in depth first order +foldAst :: (Monad m) => Tokenizer m () foldAst = do ast <- gets currentAst if null (nodeChildren ast) - then maybeM visitLeafIds + then liftMaybeM visitLeafIds else do let children = nodeChildren ast mapM_ (\x -> (modify $ \s -> s {currentAst = x}) >> foldAst) children -hieAstSpanIdentifiers :: VirtualFile -> HieAST a -> RangeIdSetMap -hieAstSpanIdentifiers vf ast = rangeIdSetMap $ execState foldAst (mkPTokenState vf ast) - -codePointRangeToRangeWith :: UInt -> UInt -> CodePointRange -> Range -codePointRangeToRangeWith newStartCol newEndCol (CodePointRange (CodePointPosition startLine _) (CodePointPosition endLine _)) = - Range (Position startLine newStartCol) (Position endLine newEndCol) - -newColumn :: UInt -> Text -> UInt -newColumn n rp = case T.breakOnEnd "\n" rp of - ("", nEnd) -> n + fromIntegral (Rope.utf16Length $ Rope.fromText nEnd) - (_, nEnd) -> fromIntegral (Rope.utf16Length $ Rope.fromText nEnd) - -visitLeafIds :: Parser Maybe () -visitLeafIds = maybeM $ do +visitLeafIds :: Tokenizer Maybe () +visitLeafIds = liftMaybeM $ do leaf <- gets currentAst - pos <- gets cursor - rp <- gets rope - let span = nodeSpan leaf - (gap, token, remains) <- lift $ splitTokenAt pos rp span - cs <- gets columnsInUtf16 - let ncs = newColumn cs gap - let nce = newColumn ncs token - let ran = codePointRangeToRangeWith ncs nce $ realSrcSpanToCodePointRange span - -- ran <- lift $ codePointRangeToRange vf $ realSrcSpanToCodePointRange span - -- _ <- lift $ traceShowM (cs, ncs, nce, rangeShortStr ran) - -- set the new column to nce - updateColumnsInUtf16 nce - ranges <- lift $ splitRangeByText token ran - modify $ \s -> s {currentRange = ran, currentRangeContext = ranges} + (ran, token) <- focusTokenAt leaf + splitResult <- lift $ splitRangeByText token ran + modify $ \s -> s {currentRange = ran, currentRangeContext = splitResult} mapM_ combineNodeIds $ Map.filterWithKey (\k _ -> k == SourceInfo) $ getSourcedNodeInfo $ sourcedNodeInfo leaf - updateCursor $ srcSpanEndCharPosition span - updateRope remains where - combineNodeIds :: (Monad m) => 
NodeInfo a -> Parser m () + combineNodeIds :: (Monad m) => NodeInfo a -> Tokenizer m () combineNodeIds (NodeInfo _ _ bd) = mapM_ getIdentifier (M.keys bd) - getIdentifier :: (Monad m) => Identifier -> Parser m () - getIdentifier idt = maybeM $ do + getIdentifier :: (Monad m) => Identifier -> Tokenizer m () + getIdentifier idt = liftMaybeM $ do ran <- gets currentRange case idt of Left _moduleName -> addRangeIdSetMap ran idt Right name -> do ranSplit <- gets currentRangeContext - occStr <- lift $ case nameString name of + occStr <- lift $ case (occNameString . nameOccName) name of -- the generated selector name with {-# LANGUAGE DuplicateRecordFields #-} '$' : 's' : 'e' : 'l' : ':' : xs -> Just $ takeWhile (/= ':') xs ['$'] -> Just "$" @@ -158,38 +130,64 @@ visitLeafIds = maybeM $ do addRangeIdSetMap r1 (Left $ mkModuleName "") addRangeIdSetMap r2 idt -splitTokenAt :: Char.Position -> Rope -> Span -> Maybe (Text, Text, Rope) -splitTokenAt pos rp span = do - (startPos, length) <- srcSpanMaybePositionLength span - let s = startPos `sub` pos - -- discard the gap between the end of the last identifier and the start of the current identifier - let (_gap, startRope) = Rope.charSplitAtPosition s rp - (token, remains) <- charSplitAtMaybe length startRope - return (Rope.toText _gap, Rope.toText token, remains) - -charSplitAtMaybe :: Word -> Rope -> Maybe (Rope, Rope) -charSplitAtMaybe len rp = do - let (prefix, suffix) = Rope.charSplitAt len rp - guard $ Rope.charLength prefix == len - return (prefix, suffix) - -srcSpanMaybePositionLength :: (Integral l) => RealSrcSpan -> Maybe (Char.Position, l) -srcSpanMaybePositionLength real = - return - ( realSrcLocRopePosition $ realSrcSpanStart real, - fromIntegral $ (srcLocCol $ realSrcSpanEnd real) - (srcLocCol $ realSrcSpanStart real) - ) - -realSrcLocRopePosition :: RealSrcLoc -> Char.Position -realSrcLocRopePosition real = Char.Position (fromIntegral $ srcLocLine real - 1) (fromIntegral $ srcLocCol real - 1) - -sub :: Char.Position -> Char.Position -> Char.Position -sub (Char.Position l1 c1) (Char.Position l2 c2) = - if l1 == l2 then Char.Position 0 (c1 - c2) else Char.Position (l1 - l2) c1 - -srcSpanEndCharPosition :: RealSrcSpan -> Char.Position -srcSpanEndCharPosition real = realSrcLocRopePosition $ realSrcSpanEnd real - +focusTokenAt :: + -- | leaf node we want to focus on + HieAST a -> + -- | (token, remains) + Tokenizer Maybe (Range, Text) +focusTokenAt leaf = do + rp <- gets rope + cur <- gets cursor + cs <- gets columnsInUtf16 + let span = nodeSpan leaf + (startPos, length) <- lift $ srcSpanMaybePositionLength span + let (gap, startRope) = Rope.charSplitAtPosition (startPos `sub` cur) rp + (token, remains) <- lift $ charSplitAtMaybe length startRope + let tokenText = Rope.toText token + let ncs = newColumn cs $ Rope.toText gap + let nce = newColumn ncs tokenText + -- compute the new range for utf16 + let ran = codePointRangeToRangeWith ncs nce $ realSrcSpanToCodePointRange span + updateColumnsInUtf16 nce + updateRope remains + updateCursor $ srcSpanEndCharPosition span + return (ran, tokenText) + where + srcSpanMaybePositionLength :: (Integral l) => RealSrcSpan -> Maybe (Char.Position, l) + srcSpanMaybePositionLength real = + return + ( realSrcLocRopePosition $ realSrcSpanStart real, + fromIntegral $ (srcLocCol $ realSrcSpanEnd real) - (srcLocCol $ realSrcSpanStart real) + ) + charSplitAtMaybe :: Word -> Rope -> Maybe (Rope, Rope) + charSplitAtMaybe len rpe = do + let (prefix, suffix) = Rope.charSplitAt len rpe + guard $ Rope.charLength prefix == 
len + return (prefix, suffix) + sub :: Char.Position -> Char.Position -> Char.Position + sub (Char.Position l1 c1) (Char.Position l2 c2) = + if l1 == l2 then Char.Position 0 (c1 - c2) else Char.Position (l1 - l2) c1 + realSrcLocRopePosition :: RealSrcLoc -> Char.Position + realSrcLocRopePosition real = Char.Position (fromIntegral $ srcLocLine real - 1) (fromIntegral $ srcLocCol real - 1) + srcSpanEndCharPosition :: RealSrcSpan -> Char.Position + srcSpanEndCharPosition real = realSrcLocRopePosition $ realSrcSpanEnd real + newColumn :: UInt -> Text -> UInt + newColumn n rp = case T.breakOnEnd "\n" rp of + ("", nEnd) -> n + fromIntegral (Rope.utf16Length $ Rope.fromText nEnd) + (_, nEnd) -> fromIntegral (Rope.utf16Length $ Rope.fromText nEnd) + codePointRangeToRangeWith :: UInt -> UInt -> CodePointRange -> Range + codePointRangeToRangeWith newStartCol newEndCol (CodePointRange (CodePointPosition startLine _) (CodePointPosition endLine _)) = + Range (Position startLine newStartCol) (Position endLine newEndCol) + +-- | splitRangeByText +-- split a qualified identifier into module name and identifier and/or strip the (), `` +-- for `ModuleA.b`, break it into `ModuleA.` and `b` +-- for `(b)`, strip `()`, and get `b` +-- for `(ModuleA.b)`, strip `()` and break it into `ModuleA.` and `b` +-- nameLength get the length of the `b` in code points unit +-- while Range might not be in code points unit. +-- but the comparison is still valid since we only want to know if it is potentially a qualified identifier +-- or an identifier that is wrapped in () or `` splitRangeByText :: Text -> Range -> Maybe SplitResult splitRangeByText tk ran = do let (ran', tk') = case T.uncons tk of @@ -199,15 +197,11 @@ splitRangeByText tk ran = do let (prefix, tk'') = T.breakOnEnd "." tk' spr <- splitRange tk'' (fromIntegral $ Rope.utf16Length $ Rope.fromText prefix) ran' return spr - -splitRange :: Text -> UInt -> Range -> Maybe SplitResult -splitRange tx n ran@(Range (Position l1 c1) (Position l2 c2)) - | l1 == l2, n <= 0 = Just $ NoSplit (tx, ran) - | l1 == l2, n < fromIntegral (c2 - c1) = Just $ Split (tx, Range (Position l1 c1) (Position l1 (c1 + n)), Range (Position l1 (c1 + n)) (Position l1 c2)) - | otherwise = Nothing - -subOneRange :: Range -> Range -subOneRange (Range (Position l1 c1) (Position l2 c2)) = Range (Position l1 (c1 + 1)) (Position l2 (c2 - 1)) - -nameString :: Name -> String -nameString = occNameString . 
nameOccName + where + splitRange :: Text -> UInt -> Range -> Maybe SplitResult + splitRange tx n r@(Range (Position l1 c1) (Position l2 c2)) + | l1 == l2, n <= 0 = Just $ NoSplit (tx, r) + | l1 == l2, n < fromIntegral (c2 - c1) = Just $ Split (tx, Range (Position l1 c1) (Position l1 (c1 + n)), Range (Position l1 (c1 + n)) (Position l1 c2)) + | otherwise = Nothing + subOneRange :: Range -> Range + subOneRange (Range (Position l1 c1) (Position l2 c2)) = Range (Position l1 (c1 + 1)) (Position l2 (c2 - 1)) diff --git a/plugins/hls-semantic-tokens-plugin/src/Ide/Plugin/SemanticTokens/Utils.hs b/plugins/hls-semantic-tokens-plugin/src/Ide/Plugin/SemanticTokens/Utils.hs index 86f406bfdc..a453888277 100644 --- a/plugins/hls-semantic-tokens-plugin/src/Ide/Plugin/SemanticTokens/Utils.hs +++ b/plugins/hls-semantic-tokens-plugin/src/Ide/Plugin/SemanticTokens/Utils.hs @@ -168,3 +168,4 @@ getTextByCodePointRangeFromVfs vf ra = do rangeShortStr :: Range -> String rangeShortStr (Range (Position startLine startColumn) (Position endLine endColumn)) = show startLine <> ":" <> show startColumn <> "-" <> show endLine <> ":" <> show endColumn + From 65d493d20180a10eff4a5163fbeb38519fb6d213 Mon Sep 17 00:00:00 2001 From: Patrick Date: Mon, 22 Jan 2024 21:59:34 +0800 Subject: [PATCH 27/74] stylish --- .../src/Ide/Plugin/SemanticTokens/Internal.hs | 2 +- .../src/Ide/Plugin/SemanticTokens/Query.hs | 3 +- .../src/Ide/Plugin/SemanticTokens/Tokenize.hs | 74 +++++++++---------- 3 files changed, 40 insertions(+), 39 deletions(-) diff --git a/plugins/hls-semantic-tokens-plugin/src/Ide/Plugin/SemanticTokens/Internal.hs b/plugins/hls-semantic-tokens-plugin/src/Ide/Plugin/SemanticTokens/Internal.hs index 409bd9a160..547330a474 100644 --- a/plugins/hls-semantic-tokens-plugin/src/Ide/Plugin/SemanticTokens/Internal.hs +++ b/plugins/hls-semantic-tokens-plugin/src/Ide/Plugin/SemanticTokens/Internal.hs @@ -58,6 +58,7 @@ import Ide.Plugin.Error (PluginError (PluginIn import Ide.Plugin.SemanticTokens.Mappings import Ide.Plugin.SemanticTokens.Query import Ide.Plugin.SemanticTokens.SemanticConfig (mkSemanticConfigFunctions) +import Ide.Plugin.SemanticTokens.Tokenize (hieAstSpanIdentifiers) import Ide.Plugin.SemanticTokens.Types import Ide.Types import qualified Language.LSP.Protocol.Lens as L @@ -66,7 +67,6 @@ import Language.LSP.Protocol.Types (NormalizedFilePath, SemanticTokens, type (|?) 
(InL)) import Prelude hiding (span) -import Ide.Plugin.SemanticTokens.Tokenize (hieAstSpanIdentifiers) $mkSemanticConfigFunctions diff --git a/plugins/hls-semantic-tokens-plugin/src/Ide/Plugin/SemanticTokens/Query.hs b/plugins/hls-semantic-tokens-plugin/src/Ide/Plugin/SemanticTokens/Query.hs index e1919c3dd5..f817418a70 100644 --- a/plugins/hls-semantic-tokens-plugin/src/Ide/Plugin/SemanticTokens/Query.hs +++ b/plugins/hls-semantic-tokens-plugin/src/Ide/Plugin/SemanticTokens/Query.hs @@ -26,7 +26,8 @@ import Ide.Plugin.SemanticTokens.Types (HieFunMaskKind, HsSemanticTokenType (TModule), NameSemanticMap, SemanticTokensConfig) -import Ide.Plugin.SemanticTokens.Utils (splitAndBreakModuleNameAndOccName, getTextByCodePointRangeFromVfs) +import Ide.Plugin.SemanticTokens.Utils (getTextByCodePointRangeFromVfs, + splitAndBreakModuleNameAndOccName) import Language.LSP.Protocol.Types (Position (Position), Range (Range), SemanticTokenAbsolute (SemanticTokenAbsolute), diff --git a/plugins/hls-semantic-tokens-plugin/src/Ide/Plugin/SemanticTokens/Tokenize.hs b/plugins/hls-semantic-tokens-plugin/src/Ide/Plugin/SemanticTokens/Tokenize.hs index 566c49d6d1..9c095f7c0e 100644 --- a/plugins/hls-semantic-tokens-plugin/src/Ide/Plugin/SemanticTokens/Tokenize.hs +++ b/plugins/hls-semantic-tokens-plugin/src/Ide/Plugin/SemanticTokens/Tokenize.hs @@ -1,45 +1,45 @@ -{-# LANGUAGE DeriveFunctor #-} +{-# LANGUAGE DeriveFunctor #-} {-# LANGUAGE OverloadedRecordDot #-} -{-# LANGUAGE OverloadedStrings #-} -{-# LANGUAGE RankNTypes #-} +{-# LANGUAGE OverloadedStrings #-} +{-# LANGUAGE RankNTypes #-} module Ide.Plugin.SemanticTokens.Tokenize (hieAstSpanIdentifiers) where -import Control.Lens (Identity (runIdentity)) -import Control.Monad (forM_, guard) -import Control.Monad.State (MonadState (get), MonadTrans (lift), execState, execStateT, gets, modify, put, runStateT) -import Control.Monad.Trans.State (StateT) -import qualified Data.Map as M -import qualified Data.Map as Map -import Data.Set (Set) -import qualified Data.Set as S -import Data.Text (Text) -import qualified Data.Text as T -import qualified Data.Text.Rope as Char -import Data.Text.Utf16.Rope (toText) -import Data.Text.Utf16.Rope.Mixed (Rope) -import qualified Data.Text.Utf16.Rope.Mixed as Rope -import Development.IDE.GHC.Compat -import Development.IDE.GHC.Error (realSrcSpanToCodePointRange) -import Language.LSP.Protocol.Types - ( Position (Position), - Range (Range), - UInt, - ) -import Language.LSP.VFS hiding (line) -import Prelude hiding (length, span) +import Control.Lens (Identity (runIdentity)) +import Control.Monad (forM_, guard) +import Control.Monad.State (MonadState (get), + MonadTrans (lift), execState, + execStateT, gets, modify, put, + runStateT) +import Control.Monad.Trans.State (StateT) +import qualified Data.Map as M +import qualified Data.Map as Map +import Data.Set (Set) +import qualified Data.Set as S +import Data.Text (Text) +import qualified Data.Text as T +import qualified Data.Text.Rope as Char +import Data.Text.Utf16.Rope (toText) +import Data.Text.Utf16.Rope.Mixed (Rope) +import qualified Data.Text.Utf16.Rope.Mixed as Rope +import Development.IDE.GHC.Compat +import Development.IDE.GHC.Error (realSrcSpanToCodePointRange) +import Language.LSP.Protocol.Types (Position (Position), + Range (Range), UInt) +import Language.LSP.VFS hiding (line) +import Prelude hiding (length, span) type Tokenizer m a = forall t. 
StateT (PTokenState t) m a type RangeIdSetMap = Map.Map Range (Set Identifier) data PTokenState t = PTokenState - { rangeIdSetMap :: RangeIdSetMap, - rope :: Rope, - cursor :: Char.Position, - currentAst :: HieAST t, - columnsInUtf16 :: UInt, - currentRange :: Range, + { rangeIdSetMap :: RangeIdSetMap, + rope :: Rope, + cursor :: Char.Position, + currentAst :: HieAST t, + columnsInUtf16 :: UInt, + currentRange :: Range, currentRangeContext :: SplitResult } @@ -117,10 +117,10 @@ visitLeafIds = liftMaybeM $ do occStr <- lift $ case (occNameString . nameOccName) name of -- the generated selector name with {-# LANGUAGE DuplicateRecordFields #-} '$' : 's' : 'e' : 'l' : ':' : xs -> Just $ takeWhile (/= ':') xs - ['$'] -> Just "$" + ['$'] -> Just "$" -- other generated names that should not be visible - '$' : _ -> Nothing - ns -> Just ns + '$' : _ -> Nothing + ns -> Just ns case ranSplit of (NoSplit (tk, r)) -> do guard $ T.unpack tk == occStr @@ -174,7 +174,7 @@ focusTokenAt leaf = do newColumn :: UInt -> Text -> UInt newColumn n rp = case T.breakOnEnd "\n" rp of ("", nEnd) -> n + fromIntegral (Rope.utf16Length $ Rope.fromText nEnd) - (_, nEnd) -> fromIntegral (Rope.utf16Length $ Rope.fromText nEnd) + (_, nEnd) -> fromIntegral (Rope.utf16Length $ Rope.fromText nEnd) codePointRangeToRangeWith :: UInt -> UInt -> CodePointRange -> Range codePointRangeToRangeWith newStartCol newEndCol (CodePointRange (CodePointPosition startLine _) (CodePointPosition endLine _)) = Range (Position startLine newStartCol) (Position endLine newEndCol) @@ -193,7 +193,7 @@ splitRangeByText tk ran = do let (ran', tk') = case T.uncons tk of Just ('(', xs) -> (subOneRange ran, T.takeWhile (/= ')') xs) Just ('`', xs) -> (subOneRange ran, T.takeWhile (/= '`') xs) - _ -> (ran, tk) + _ -> (ran, tk) let (prefix, tk'') = T.breakOnEnd "." 
tk' spr <- splitRange tk'' (fromIntegral $ Rope.utf16Length $ Rope.fromText prefix) ran' return spr From bb7f50d47e1cfe9683ff26fbaa722a2471d5bf7a Mon Sep 17 00:00:00 2001 From: Patrick Date: Mon, 22 Jan 2024 22:32:37 +0800 Subject: [PATCH 28/74] cleanup --- .../src/Ide/Plugin/SemanticTokens/Query.hs | 8 +- .../src/Ide/Plugin/SemanticTokens/Tokenize.hs | 5 +- .../src/Ide/Plugin/SemanticTokens/Utils.hs | 80 ++----------------- 3 files changed, 8 insertions(+), 85 deletions(-) diff --git a/plugins/hls-semantic-tokens-plugin/src/Ide/Plugin/SemanticTokens/Query.hs b/plugins/hls-semantic-tokens-plugin/src/Ide/Plugin/SemanticTokens/Query.hs index f817418a70..9a26ff0fbc 100644 --- a/plugins/hls-semantic-tokens-plugin/src/Ide/Plugin/SemanticTokens/Query.hs +++ b/plugins/hls-semantic-tokens-plugin/src/Ide/Plugin/SemanticTokens/Query.hs @@ -11,30 +11,24 @@ module Ide.Plugin.SemanticTokens.Query where import Data.Foldable (fold) import qualified Data.Map as M import qualified Data.Map as Map -import Data.Maybe (fromMaybe, listToMaybe, - mapMaybe) +import Data.Maybe (listToMaybe, mapMaybe) import Data.Set (Set) -import qualified Data.Set as S import qualified Data.Set as Set import Data.Text (Text) import Development.IDE.Core.PositionMapping (PositionMapping, toCurrentRange) import Development.IDE.GHC.Compat -import Development.IDE.GHC.Error (realSrcSpanToCodePointRange) import Ide.Plugin.SemanticTokens.Mappings import Ide.Plugin.SemanticTokens.Types (HieFunMaskKind, HsSemanticTokenType (TModule), NameSemanticMap, SemanticTokensConfig) -import Ide.Plugin.SemanticTokens.Utils (getTextByCodePointRangeFromVfs, - splitAndBreakModuleNameAndOccName) import Language.LSP.Protocol.Types (Position (Position), Range (Range), SemanticTokenAbsolute (SemanticTokenAbsolute), SemanticTokens, defaultSemanticTokensLegend, makeSemanticTokens) -import Language.LSP.VFS import Prelude hiding (length, span) --------------------------------------------------------- diff --git a/plugins/hls-semantic-tokens-plugin/src/Ide/Plugin/SemanticTokens/Tokenize.hs b/plugins/hls-semantic-tokens-plugin/src/Ide/Plugin/SemanticTokens/Tokenize.hs index 9c095f7c0e..dd44b7ced9 100644 --- a/plugins/hls-semantic-tokens-plugin/src/Ide/Plugin/SemanticTokens/Tokenize.hs +++ b/plugins/hls-semantic-tokens-plugin/src/Ide/Plugin/SemanticTokens/Tokenize.hs @@ -8,9 +8,8 @@ module Ide.Plugin.SemanticTokens.Tokenize (hieAstSpanIdentifiers) where import Control.Lens (Identity (runIdentity)) import Control.Monad (forM_, guard) import Control.Monad.State (MonadState (get), - MonadTrans (lift), execState, - execStateT, gets, modify, put, - runStateT) + MonadTrans (lift), execStateT, + gets, modify, put) import Control.Monad.Trans.State (StateT) import qualified Data.Map as M import qualified Data.Map as Map diff --git a/plugins/hls-semantic-tokens-plugin/src/Ide/Plugin/SemanticTokens/Utils.hs b/plugins/hls-semantic-tokens-plugin/src/Ide/Plugin/SemanticTokens/Utils.hs index a453888277..f6b4657e30 100644 --- a/plugins/hls-semantic-tokens-plugin/src/Ide/Plugin/SemanticTokens/Utils.hs +++ b/plugins/hls-semantic-tokens-plugin/src/Ide/Plugin/SemanticTokens/Utils.hs @@ -8,20 +8,12 @@ module Ide.Plugin.SemanticTokens.Utils where -import Control.Monad (guard) -import Data.Bool (bool) -import Data.ByteString (ByteString) -import Data.ByteString.Char8 (unpack) -import qualified Data.Map as Map -import Data.Text (Text, breakOnEnd, length) -import qualified Data.Text as T -import Data.Text.Utf16.Rope (Rope, splitAtPosition) -import qualified Data.Text.Utf16.Rope as Rope 
-import Development.IDE (Position (..), Range (..)) +import Data.ByteString (ByteString) +import Data.ByteString.Char8 (unpack) +import qualified Data.Map as Map +import Development.IDE (Position (..), Range (..)) import Development.IDE.GHC.Compat -import Development.IDE.GHC.Compat.Util (mkFastString) -import Language.LSP.VFS (VirtualFile, _file_text) -import Prelude hiding (length, span) +import Prelude hiding (length, span) deriving instance Show DeclType deriving instance Show BindType @@ -103,68 +95,6 @@ mkRange startLine startCol len = Range (Position (fromIntegral startLine) (fromIntegral startCol)) (Position (fromIntegral startLine) (fromIntegral $ startCol + len)) --- | split a qualified identifier into module name and identifier and/or strip the (), `` --- for `ModuleA.b`, break it into `ModuleA.` and `b` --- for `(b)`, strip `()`, and get `b` --- for `(ModuleA.b)`, strip `()` and break it into `ModuleA.` and `b` --- nameLength get the length of the `b` in code points unit --- while Range might not be in code points unit. --- but the comparison is still valid since we only want to know if it is potentially a qualified identifier --- or an identifier that is wrapped in () or `` -splitAndBreakModuleNameAndOccName :: Text -> Range -> Identifier -> [(Range,Identifier)] -splitAndBreakModuleNameAndOccName _ ran (Left m) = [(ran, Left m)] -splitAndBreakModuleNameAndOccName txt ran@(Range (Position startLine startColumn) (Position _endLine endColumn)) (Right name) - | nameLength name < fromIntegral (endColumn - startColumn) = - let stripFlag = peekStripFlag txt - in case peekPrefixModuleNameLength txt of - Just prefixLen -> - [(Range (Position startLine (startColumn + bool 0 1 stripFlag)) - (Position startLine (startColumn + fromIntegral prefixLen)) , Left (mkModuleName "")), -- we do not need the module name, only tis range - (Range (Position startLine (startColumn + fromIntegral prefixLen)) - (Position startLine (endColumn + bool 0 (-1) stripFlag)), Right name)] - Nothing -> if stripFlag - then [(Range (Position startLine (startColumn+1)) (Position _endLine (endColumn-1)), Right name)] - else [(ran, Right name)] - | otherwise = [(ran, Right name)] - -nameLength :: Name -> Int -nameLength = lengthFS . occNameFS . nameOccName - -peekStripFlag :: Text -> Bool -peekStripFlag token = - case T.uncons token of - Just (c, _) -> c `elem` strippedChars - Nothing -> False - where strippedChars = ['`', '('] - --- | peek at the prefix of a range, --- if it is a qualified name, return the length of the module name. --- module name everything before the last dot. -peekPrefixModuleNameLength :: Text -> Maybe Int -peekPrefixModuleNameLength token = do - let prefixLen = length $ fst $ breakOnEnd "." 
token - guard $ prefixLen > 0 - return prefixLen - --- | get the text from a range in a virtual file -getTextByCodePointRangeFromVfs :: VirtualFile -> Range -> Maybe Text -getTextByCodePointRangeFromVfs vf ra = do - let rp = vf._file_text - let (pos, len) = rangeToPositionLength ra - (_, suffix) <- splitAtPosition (codePointPositionRopePosition pos) rp - (prefix, _) <- Rope.splitAt len suffix - let token = Rope.toText prefix - return token - where - rangeToPositionLength :: (Integral l) => Range -> (Position, l) - rangeToPositionLength (Range beginPos@(Position _ startColumn) (Position _ endColumn)) = - (beginPos, fromIntegral $ endColumn - startColumn) - codePointPositionRopePosition :: Position -> Rope.Position - codePointPositionRopePosition (Position line column) = do - let line' = fromIntegral line - let column' = fromIntegral column - Rope.Position line' column' - rangeShortStr :: Range -> String rangeShortStr (Range (Position startLine startColumn) (Position endLine endColumn)) = show startLine <> ":" <> show startColumn <> "-" <> show endLine <> ":" <> show endColumn From d596876200392eb16ce6971dd8aaf59453cbc0df Mon Sep 17 00:00:00 2001 From: Patrick Date: Tue, 23 Jan 2024 00:04:02 +0800 Subject: [PATCH 29/74] cleaup --- .../src/Ide/Plugin/SemanticTokens/Tokenize.hs | 3 +-- 1 file changed, 1 insertion(+), 2 deletions(-) diff --git a/plugins/hls-semantic-tokens-plugin/src/Ide/Plugin/SemanticTokens/Tokenize.hs b/plugins/hls-semantic-tokens-plugin/src/Ide/Plugin/SemanticTokens/Tokenize.hs index dd44b7ced9..68f7658ef8 100644 --- a/plugins/hls-semantic-tokens-plugin/src/Ide/Plugin/SemanticTokens/Tokenize.hs +++ b/plugins/hls-semantic-tokens-plugin/src/Ide/Plugin/SemanticTokens/Tokenize.hs @@ -194,8 +194,7 @@ splitRangeByText tk ran = do Just ('`', xs) -> (subOneRange ran, T.takeWhile (/= '`') xs) _ -> (ran, tk) let (prefix, tk'') = T.breakOnEnd "." tk' - spr <- splitRange tk'' (fromIntegral $ Rope.utf16Length $ Rope.fromText prefix) ran' - return spr + splitRange tk'' (fromIntegral $ Rope.utf16Length $ Rope.fromText prefix) ran' where splitRange :: Text -> UInt -> Range -> Maybe SplitResult splitRange tx n r@(Range (Position l1 c1) (Position l2 c2)) From d113778125fafc6d4273b589c142abc1c3242b2f Mon Sep 17 00:00:00 2001 From: Patrick Date: Tue, 23 Jan 2024 14:57:10 +0800 Subject: [PATCH 30/74] add type signatures --- .../src/Ide/Plugin/SemanticTokens/Internal.hs | 1 - .../Plugin/SemanticTokens/SemanticConfig.hs | 44 +++++++++++++++++-- 2 files changed, 40 insertions(+), 5 deletions(-) diff --git a/plugins/hls-semantic-tokens-plugin/src/Ide/Plugin/SemanticTokens/Internal.hs b/plugins/hls-semantic-tokens-plugin/src/Ide/Plugin/SemanticTokens/Internal.hs index 547330a474..c539ed9dc7 100644 --- a/plugins/hls-semantic-tokens-plugin/src/Ide/Plugin/SemanticTokens/Internal.hs +++ b/plugins/hls-semantic-tokens-plugin/src/Ide/Plugin/SemanticTokens/Internal.hs @@ -13,7 +13,6 @@ {-# LANGUAGE TemplateHaskell #-} {-# LANGUAGE TypeFamilies #-} {-# LANGUAGE UnicodeSyntax #-} -{-# OPTIONS_GHC -Wno-missing-signatures #-} -- | -- This module provides the core functionality of the plugin. 
diff --git a/plugins/hls-semantic-tokens-plugin/src/Ide/Plugin/SemanticTokens/SemanticConfig.hs b/plugins/hls-semantic-tokens-plugin/src/Ide/Plugin/SemanticTokens/SemanticConfig.hs index 5d7bed662b..73244224ca 100644 --- a/plugins/hls-semantic-tokens-plugin/src/Ide/Plugin/SemanticTokens/SemanticConfig.hs +++ b/plugins/hls-semantic-tokens-plugin/src/Ide/Plugin/SemanticTokens/SemanticConfig.hs @@ -4,6 +4,7 @@ {-# LANGUAGE TemplateHaskell #-} {-# LANGUAGE TypeFamilies #-} {-# LANGUAGE TypeOperators #-} +{-# LANGUAGE DataKinds #-} module Ide.Plugin.SemanticTokens.SemanticConfig where @@ -11,13 +12,14 @@ import Data.Char (toLower) import Data.Default (def) import qualified Data.Set as S import qualified Data.Text as T -import Development.IDE (usePropertyAction) +import Development.IDE (usePropertyAction, Action) import Ide.Plugin.Properties (defineEnumProperty, - emptyProperties) + emptyProperties, Properties, PropertyType(type TEnum), PropertyKey(type PropertyKey)) import Ide.Plugin.SemanticTokens.Types import Language.Haskell.TH import Language.LSP.Protocol.Types (LspEnum (..), SemanticTokenTypes) +import Ide.Types (PluginId) @@ -69,18 +71,24 @@ defineSemanticProperty (lb, tokenType, st) = semanticDef :: SemanticTokensConfig semanticDef = def + + -- | it produces the following functions: -- semanticConfigProperties :: Properties '[ -- 'PropertyKey "Variable" ('TEnum SemanticTokenTypes), -- ... -- ] +-- useSemanticConfigAction :: PluginId -> Action SemanticTokensConfig + -- useSemanticConfigAction :: PluginId -> Action SemanticTokensConfig mkSemanticConfigFunctions :: Q [Dec] mkSemanticConfigFunctions = do let pid = mkName "pid" let semanticConfigPropertiesName = mkName "semanticConfigProperties" let useSemanticConfigActionName = mkName "useSemanticConfigAction" - let allLabels = map (LabelE . (<> "Token"). lowerFirst) allHsTokenNameStrings + let + allLabelStrs = map ((<> "Token"). lowerFirst) allHsTokenNameStrings + allLabels = map (LabelE . (<> "Token"). lowerFirst) allHsTokenNameStrings allFieldsNames = map (mkName . toConfigName) allHsTokenNameStrings allVariableNames = map (mkName . ("_variable_" <>) . toConfigName) allHsTokenNameStrings -- <- useSemanticConfigAction label pid config @@ -95,6 +103,7 @@ mkSemanticConfigFunctions = do -- get and then update record bb = DoE Nothing $ getProperties ++ [NoBindS $ AppE (VarE 'return) recordUpdate] let useSemanticConfigAction = FunD useSemanticConfigActionName [Clause [VarP pid] (NormalB bb) []] + let useSemanticConfigActionSig = SigD useSemanticConfigActionName (ArrowT `AppT ` ConT ''PluginId `AppT` (ConT ''Action `AppT` ConT ''SemanticTokensConfig)) -- SemanticConfigProperties nameAndDescList <- @@ -106,5 +115,32 @@ mkSemanticConfigFunctions = do ) $ zip allLabels allHsTokenTypes let body = foldr (AppE . 
AppE (VarE 'defineSemanticProperty)) (VarE 'emptyProperties) nameAndDescList + let propertiesType = foldr (\la rest -> + (PromotedConsT `AppT` + (AppT (ConT 'PropertyKey) (LitT (StrTyLit la)) `AppT` AppT (ConT 'TEnum) (ConT ''SemanticTokenTypes))) + `AppT` rest) + PromotedNilT allLabelStrs let semanticConfigProperties = FunD semanticConfigPropertiesName [Clause [] (NormalB body) []] - return [semanticConfigProperties, useSemanticConfigAction] + let semanticConfigPropertiesSig = SigD semanticConfigPropertiesName (AppT (ConT ''Properties) propertiesType) + return [semanticConfigPropertiesSig, semanticConfigProperties, useSemanticConfigActionSig, useSemanticConfigAction] + +go :: Properties + '[ 'PropertyKey "variableToken" ('TEnum SemanticTokenTypes), + 'PropertyKey "functionToken" ('TEnum SemanticTokenTypes), + 'PropertyKey "dataConstructorToken" ('TEnum SemanticTokenTypes), + 'PropertyKey "typeVariableToken" ('TEnum SemanticTokenTypes), + 'PropertyKey "classMethodToken" ('TEnum SemanticTokenTypes), + 'PropertyKey "patternSynonymToken" ('TEnum SemanticTokenTypes), + 'PropertyKey "typeConstructorToken" ('TEnum SemanticTokenTypes), + 'PropertyKey "classToken" ('TEnum SemanticTokenTypes), + 'PropertyKey "typeSynonymToken" ('TEnum SemanticTokenTypes), + 'PropertyKey "typeFamilyToken" ('TEnum SemanticTokenTypes), + 'PropertyKey "recordFieldToken" ('TEnum SemanticTokenTypes), + 'PropertyKey "moduleToken" ('TEnum SemanticTokenTypes)] +go = undefined + +-- mkSemanticConfigPropertiesType :: Q [Dec] +-- mkSemanticConfigPropertiesType = do +-- let propertiesType = AppT (PromotedConsT `AppT` (AppT (ConT ''PropertyKey) (LitT (StrTyLit "Variable")) `AppT` +-- (AppT (ConT ''TEnum) (ConT ''SemanticTokenTypes))) (PromotedNilT)) +-- return [SigD (mkName "semanticConfigProperties") (AppT (ConT ''Properties) propertiesType)] From 1ea88fbd5d6d885cede46c7c2155b252ff51b34f Mon Sep 17 00:00:00 2001 From: Patrick Date: Tue, 23 Jan 2024 14:58:57 +0800 Subject: [PATCH 31/74] cleanup --- .../src/Ide/Plugin/SemanticTokens/SemanticConfig.hs | 7 +++---- 1 file changed, 3 insertions(+), 4 deletions(-) diff --git a/plugins/hls-semantic-tokens-plugin/src/Ide/Plugin/SemanticTokens/SemanticConfig.hs b/plugins/hls-semantic-tokens-plugin/src/Ide/Plugin/SemanticTokens/SemanticConfig.hs index 73244224ca..ea41cd65f9 100644 --- a/plugins/hls-semantic-tokens-plugin/src/Ide/Plugin/SemanticTokens/SemanticConfig.hs +++ b/plugins/hls-semantic-tokens-plugin/src/Ide/Plugin/SemanticTokens/SemanticConfig.hs @@ -115,10 +115,9 @@ mkSemanticConfigFunctions = do ) $ zip allLabels allHsTokenTypes let body = foldr (AppE . 
AppE (VarE 'defineSemanticProperty)) (VarE 'emptyProperties) nameAndDescList - let propertiesType = foldr (\la rest -> - (PromotedConsT `AppT` - (AppT (ConT 'PropertyKey) (LitT (StrTyLit la)) `AppT` AppT (ConT 'TEnum) (ConT ''SemanticTokenTypes))) - `AppT` rest) + let propertiesType = foldr (\la -> + AppT (PromotedConsT `AppT` + (AppT (ConT 'PropertyKey) (LitT (StrTyLit la)) `AppT` AppT (ConT 'TEnum) (ConT ''SemanticTokenTypes)))) PromotedNilT allLabelStrs let semanticConfigProperties = FunD semanticConfigPropertiesName [Clause [] (NormalB body) []] let semanticConfigPropertiesSig = SigD semanticConfigPropertiesName (AppT (ConT ''Properties) propertiesType) From 5cc0c4d482869199223687e08171a96c5b7d9251 Mon Sep 17 00:00:00 2001 From: Patrick Date: Tue, 23 Jan 2024 14:59:41 +0800 Subject: [PATCH 32/74] cleanup --- .../src/Ide/Plugin/SemanticTokens/SemanticConfig.hs | 2 -- 1 file changed, 2 deletions(-) diff --git a/plugins/hls-semantic-tokens-plugin/src/Ide/Plugin/SemanticTokens/SemanticConfig.hs b/plugins/hls-semantic-tokens-plugin/src/Ide/Plugin/SemanticTokens/SemanticConfig.hs index ea41cd65f9..14b3f51ecc 100644 --- a/plugins/hls-semantic-tokens-plugin/src/Ide/Plugin/SemanticTokens/SemanticConfig.hs +++ b/plugins/hls-semantic-tokens-plugin/src/Ide/Plugin/SemanticTokens/SemanticConfig.hs @@ -78,8 +78,6 @@ semanticDef = def -- 'PropertyKey "Variable" ('TEnum SemanticTokenTypes), -- ... -- ] --- useSemanticConfigAction :: PluginId -> Action SemanticTokensConfig - -- useSemanticConfigAction :: PluginId -> Action SemanticTokensConfig mkSemanticConfigFunctions :: Q [Dec] mkSemanticConfigFunctions = do From b8a8ed35d803bcd9da2790199bb45dba9a5d0e6d Mon Sep 17 00:00:00 2001 From: Patrick Date: Tue, 23 Jan 2024 15:00:01 +0800 Subject: [PATCH 33/74] cleanup --- .../Plugin/SemanticTokens/SemanticConfig.hs | 20 ------------------- 1 file changed, 20 deletions(-) diff --git a/plugins/hls-semantic-tokens-plugin/src/Ide/Plugin/SemanticTokens/SemanticConfig.hs b/plugins/hls-semantic-tokens-plugin/src/Ide/Plugin/SemanticTokens/SemanticConfig.hs index 14b3f51ecc..62b73f20dc 100644 --- a/plugins/hls-semantic-tokens-plugin/src/Ide/Plugin/SemanticTokens/SemanticConfig.hs +++ b/plugins/hls-semantic-tokens-plugin/src/Ide/Plugin/SemanticTokens/SemanticConfig.hs @@ -121,23 +121,3 @@ mkSemanticConfigFunctions = do let semanticConfigPropertiesSig = SigD semanticConfigPropertiesName (AppT (ConT ''Properties) propertiesType) return [semanticConfigPropertiesSig, semanticConfigProperties, useSemanticConfigActionSig, useSemanticConfigAction] -go :: Properties - '[ 'PropertyKey "variableToken" ('TEnum SemanticTokenTypes), - 'PropertyKey "functionToken" ('TEnum SemanticTokenTypes), - 'PropertyKey "dataConstructorToken" ('TEnum SemanticTokenTypes), - 'PropertyKey "typeVariableToken" ('TEnum SemanticTokenTypes), - 'PropertyKey "classMethodToken" ('TEnum SemanticTokenTypes), - 'PropertyKey "patternSynonymToken" ('TEnum SemanticTokenTypes), - 'PropertyKey "typeConstructorToken" ('TEnum SemanticTokenTypes), - 'PropertyKey "classToken" ('TEnum SemanticTokenTypes), - 'PropertyKey "typeSynonymToken" ('TEnum SemanticTokenTypes), - 'PropertyKey "typeFamilyToken" ('TEnum SemanticTokenTypes), - 'PropertyKey "recordFieldToken" ('TEnum SemanticTokenTypes), - 'PropertyKey "moduleToken" ('TEnum SemanticTokenTypes)] -go = undefined - --- mkSemanticConfigPropertiesType :: Q [Dec] --- mkSemanticConfigPropertiesType = do --- let propertiesType = AppT (PromotedConsT `AppT` (AppT (ConT ''PropertyKey) (LitT (StrTyLit "Variable")) `AppT` --- 
(AppT (ConT ''TEnum) (ConT ''SemanticTokenTypes))) (PromotedNilT)) --- return [SigD (mkName "semanticConfigProperties") (AppT (ConT ''Properties) propertiesType)] From 84c8bcbf68e148d6582147723aaf5034b1a052be Mon Sep 17 00:00:00 2001 From: Patrick Date: Tue, 23 Jan 2024 15:11:47 +0800 Subject: [PATCH 34/74] add type sig --- hls-plugin-api/src/Ide/Plugin/Properties.hs | 1 + .../Plugin/SemanticTokens/SemanticConfig.hs | 20 ++++++++++++++++--- 2 files changed, 18 insertions(+), 3 deletions(-) diff --git a/hls-plugin-api/src/Ide/Plugin/Properties.hs b/hls-plugin-api/src/Ide/Plugin/Properties.hs index 3e14bda908..5c0d9a60e1 100644 --- a/hls-plugin-api/src/Ide/Plugin/Properties.hs +++ b/hls-plugin-api/src/Ide/Plugin/Properties.hs @@ -15,6 +15,7 @@ module Ide.Plugin.Properties ( PropertyType (..), ToHsType, + NotElem, MetaData (..), PropertyKey (..), SPropertyKey (..), diff --git a/plugins/hls-semantic-tokens-plugin/src/Ide/Plugin/SemanticTokens/SemanticConfig.hs b/plugins/hls-semantic-tokens-plugin/src/Ide/Plugin/SemanticTokens/SemanticConfig.hs index 62b73f20dc..e58bf3ed92 100644 --- a/plugins/hls-semantic-tokens-plugin/src/Ide/Plugin/SemanticTokens/SemanticConfig.hs +++ b/plugins/hls-semantic-tokens-plugin/src/Ide/Plugin/SemanticTokens/SemanticConfig.hs @@ -1,10 +1,10 @@ {-# LANGUAGE GADTs #-} {-# LANGUAGE OverloadedLabels #-} {-# LANGUAGE OverloadedStrings #-} -{-# LANGUAGE TemplateHaskell #-} {-# LANGUAGE TypeFamilies #-} {-# LANGUAGE TypeOperators #-} {-# LANGUAGE DataKinds #-} +{-# LANGUAGE RankNTypes #-} module Ide.Plugin.SemanticTokens.SemanticConfig where @@ -13,13 +13,23 @@ import Data.Default (def) import qualified Data.Set as S import qualified Data.Text as T import Development.IDE (usePropertyAction, Action) -import Ide.Plugin.Properties (defineEnumProperty, - emptyProperties, Properties, PropertyType(type TEnum), PropertyKey(type PropertyKey)) +import Ide.Plugin.Properties + ( defineEnumProperty, + emptyProperties, + Properties, + PropertyType(type TEnum), + PropertyKey(type PropertyKey), + KeyNameProxy, + HasProperty, + NotElem ) import Ide.Plugin.SemanticTokens.Types import Language.Haskell.TH import Language.LSP.Protocol.Types (LspEnum (..), SemanticTokenTypes) import Ide.Types (PluginId) +import Data.Text (Text) +import GHC.TypeLits (KnownSymbol, Symbol) +import Data.Aeson (ToJSON, FromJSON) @@ -61,6 +71,10 @@ lowerFirst (x:xs) = toLower x : xs allHsTokenNameStrings :: [String] allHsTokenNameStrings = map (drop 1 . 
show) allHsTokenTypes +defineSemanticProperty :: (NotElem s r, KnownSymbol s) + => (KeyNameProxy s, Text, SemanticTokenTypes) + -> Properties r + -> Properties ('PropertyKey s (TEnum SemanticTokenTypes) : r) defineSemanticProperty (lb, tokenType, st) = defineEnumProperty lb From 77b621065836621b3f9a6f6896e436df6c767a6e Mon Sep 17 00:00:00 2001 From: Patrick Date: Tue, 23 Jan 2024 15:12:33 +0800 Subject: [PATCH 35/74] fix --- .../src/Ide/Plugin/SemanticTokens/SemanticConfig.hs | 1 + 1 file changed, 1 insertion(+) diff --git a/plugins/hls-semantic-tokens-plugin/src/Ide/Plugin/SemanticTokens/SemanticConfig.hs b/plugins/hls-semantic-tokens-plugin/src/Ide/Plugin/SemanticTokens/SemanticConfig.hs index e58bf3ed92..4272edce86 100644 --- a/plugins/hls-semantic-tokens-plugin/src/Ide/Plugin/SemanticTokens/SemanticConfig.hs +++ b/plugins/hls-semantic-tokens-plugin/src/Ide/Plugin/SemanticTokens/SemanticConfig.hs @@ -5,6 +5,7 @@ {-# LANGUAGE TypeOperators #-} {-# LANGUAGE DataKinds #-} {-# LANGUAGE RankNTypes #-} +{-# LANGUAGE TemplateHaskell #-} module Ide.Plugin.SemanticTokens.SemanticConfig where From 0cd95407c528f5b67751fff12385adaa4e12fa48 Mon Sep 17 00:00:00 2001 From: Patrick Date: Tue, 23 Jan 2024 15:12:58 +0800 Subject: [PATCH 36/74] fix --- .../src/Ide/Plugin/SemanticTokens/SemanticConfig.hs | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/plugins/hls-semantic-tokens-plugin/src/Ide/Plugin/SemanticTokens/SemanticConfig.hs b/plugins/hls-semantic-tokens-plugin/src/Ide/Plugin/SemanticTokens/SemanticConfig.hs index 4272edce86..4f4ad93d6e 100644 --- a/plugins/hls-semantic-tokens-plugin/src/Ide/Plugin/SemanticTokens/SemanticConfig.hs +++ b/plugins/hls-semantic-tokens-plugin/src/Ide/Plugin/SemanticTokens/SemanticConfig.hs @@ -5,7 +5,7 @@ {-# LANGUAGE TypeOperators #-} {-# LANGUAGE DataKinds #-} {-# LANGUAGE RankNTypes #-} -{-# LANGUAGE TemplateHaskell #-} +{-# LANGUAGE TemplateHaskellQuotes #-} module Ide.Plugin.SemanticTokens.SemanticConfig where From a83a072c2f25ffa380d629e1bb007154b8ca8843 Mon Sep 17 00:00:00 2001 From: Patrick Date: Tue, 23 Jan 2024 15:14:28 +0800 Subject: [PATCH 37/74] cleanup --- .../src/Ide/Plugin/SemanticTokens/SemanticConfig.hs | 4 +--- 1 file changed, 1 insertion(+), 3 deletions(-) diff --git a/plugins/hls-semantic-tokens-plugin/src/Ide/Plugin/SemanticTokens/SemanticConfig.hs b/plugins/hls-semantic-tokens-plugin/src/Ide/Plugin/SemanticTokens/SemanticConfig.hs index 4f4ad93d6e..b64f24e047 100644 --- a/plugins/hls-semantic-tokens-plugin/src/Ide/Plugin/SemanticTokens/SemanticConfig.hs +++ b/plugins/hls-semantic-tokens-plugin/src/Ide/Plugin/SemanticTokens/SemanticConfig.hs @@ -21,7 +21,6 @@ import Ide.Plugin.Properties PropertyType(type TEnum), PropertyKey(type PropertyKey), KeyNameProxy, - HasProperty, NotElem ) import Ide.Plugin.SemanticTokens.Types import Language.Haskell.TH @@ -29,8 +28,7 @@ import Language.LSP.Protocol.Types (LspEnum (..), SemanticTokenTypes) import Ide.Types (PluginId) import Data.Text (Text) -import GHC.TypeLits (KnownSymbol, Symbol) -import Data.Aeson (ToJSON, FromJSON) +import GHC.TypeLits (KnownSymbol) From b5e93e97b08de8f670104fbbc3a120409e52aced Mon Sep 17 00:00:00 2001 From: Patrick Date: Tue, 23 Jan 2024 20:35:40 +0800 Subject: [PATCH 38/74] cleanup --- .../Plugin/SemanticTokens/SemanticConfig.hs | 36 +++++++++---------- 1 file changed, 17 insertions(+), 19 deletions(-) diff --git a/plugins/hls-semantic-tokens-plugin/src/Ide/Plugin/SemanticTokens/SemanticConfig.hs 
b/plugins/hls-semantic-tokens-plugin/src/Ide/Plugin/SemanticTokens/SemanticConfig.hs index b64f24e047..2350e42937 100644 --- a/plugins/hls-semantic-tokens-plugin/src/Ide/Plugin/SemanticTokens/SemanticConfig.hs +++ b/plugins/hls-semantic-tokens-plugin/src/Ide/Plugin/SemanticTokens/SemanticConfig.hs @@ -1,34 +1,32 @@ -{-# LANGUAGE GADTs #-} -{-# LANGUAGE OverloadedLabels #-} -{-# LANGUAGE OverloadedStrings #-} -{-# LANGUAGE TypeFamilies #-} -{-# LANGUAGE TypeOperators #-} -{-# LANGUAGE DataKinds #-} -{-# LANGUAGE RankNTypes #-} +{-# LANGUAGE DataKinds #-} +{-# LANGUAGE GADTs #-} +{-# LANGUAGE OverloadedLabels #-} +{-# LANGUAGE OverloadedStrings #-} +{-# LANGUAGE RankNTypes #-} {-# LANGUAGE TemplateHaskellQuotes #-} +{-# LANGUAGE TypeFamilies #-} +{-# LANGUAGE TypeOperators #-} module Ide.Plugin.SemanticTokens.SemanticConfig where import Data.Char (toLower) import Data.Default (def) import qualified Data.Set as S +import Data.Text (Text) import qualified Data.Text as T -import Development.IDE (usePropertyAction, Action) -import Ide.Plugin.Properties - ( defineEnumProperty, - emptyProperties, - Properties, - PropertyType(type TEnum), - PropertyKey(type PropertyKey), - KeyNameProxy, - NotElem ) +import Development.IDE (Action, usePropertyAction) +import GHC.TypeLits (KnownSymbol) +import Ide.Plugin.Properties (KeyNameProxy, NotElem, + Properties, + PropertyKey (type PropertyKey), + PropertyType (type TEnum), + defineEnumProperty, + emptyProperties) import Ide.Plugin.SemanticTokens.Types +import Ide.Types (PluginId) import Language.Haskell.TH import Language.LSP.Protocol.Types (LspEnum (..), SemanticTokenTypes) -import Ide.Types (PluginId) -import Data.Text (Text) -import GHC.TypeLits (KnownSymbol) From 23d0d766fc5966a44a90e4549f5d89aeeb43984f Mon Sep 17 00:00:00 2001 From: Patrick Date: Tue, 23 Jan 2024 21:03:05 +0800 Subject: [PATCH 39/74] remove lengthFS --- ghcide/src/Development/IDE/GHC/Compat.hs | 1 - .../Plugin/SemanticTokens/SemanticConfig.hs | 63 ++++++++++--------- 2 files changed, 32 insertions(+), 32 deletions(-) diff --git a/ghcide/src/Development/IDE/GHC/Compat.hs b/ghcide/src/Development/IDE/GHC/Compat.hs index 8cdc22cdf4..12c3fb346e 100644 --- a/ghcide/src/Development/IDE/GHC/Compat.hs +++ b/ghcide/src/Development/IDE/GHC/Compat.hs @@ -38,7 +38,6 @@ module Development.IDE.GHC.Compat( FastStringCompat, bytesFS, mkFastStringByteString, - lengthFS, nodeInfo', getNodeIds, sourceNodeInfo, diff --git a/plugins/hls-semantic-tokens-plugin/src/Ide/Plugin/SemanticTokens/SemanticConfig.hs b/plugins/hls-semantic-tokens-plugin/src/Ide/Plugin/SemanticTokens/SemanticConfig.hs index 2350e42937..b3d8aeb7ad 100644 --- a/plugins/hls-semantic-tokens-plugin/src/Ide/Plugin/SemanticTokens/SemanticConfig.hs +++ b/plugins/hls-semantic-tokens-plugin/src/Ide/Plugin/SemanticTokens/SemanticConfig.hs @@ -28,22 +28,20 @@ import Language.Haskell.TH import Language.LSP.Protocol.Types (LspEnum (..), SemanticTokenTypes) - - docName :: HsSemanticTokenType -> T.Text docName tt = case tt of - TVariable -> "variables" - TFunction -> "functions" - TDataConstructor -> "data constructors" - TTypeVariable -> "type variables" - TClassMethod -> "typeclass methods" - TPatternSynonym -> "pattern synonyms" - TTypeConstructor -> "type constructors" - TClass -> "typeclasses" - TTypeSynonym -> "type synonyms" - TTypeFamily -> "type families" - TRecordField -> "record fields" - TModule -> "modules" + TVariable -> "variables" + TFunction -> "functions" + TDataConstructor -> "data constructors" + TTypeVariable -> "type variables" + 
TClassMethod -> "typeclass methods" + TPatternSynonym -> "pattern synonyms" + TTypeConstructor -> "type constructors" + TClass -> "typeclasses" + TTypeSynonym -> "type synonyms" + TTypeFamily -> "type families" + TRecordField -> "record fields" + TModule -> "modules" toConfigName :: String -> String toConfigName = ("st" <>) @@ -62,16 +60,17 @@ allHsTokenTypes :: [HsSemanticTokenType] allHsTokenTypes = enumFrom minBound lowerFirst :: String -> String -lowerFirst [] = [] -lowerFirst (x:xs) = toLower x : xs +lowerFirst [] = [] +lowerFirst (x : xs) = toLower x : xs allHsTokenNameStrings :: [String] allHsTokenNameStrings = map (drop 1 . show) allHsTokenTypes -defineSemanticProperty :: (NotElem s r, KnownSymbol s) - => (KeyNameProxy s, Text, SemanticTokenTypes) - -> Properties r - -> Properties ('PropertyKey s (TEnum SemanticTokenTypes) : r) +defineSemanticProperty :: + (NotElem s r, KnownSymbol s) => + (KeyNameProxy s, Text, SemanticTokenTypes) -> + Properties r -> + Properties ('PropertyKey s (TEnum SemanticTokenTypes) : r) defineSemanticProperty (lb, tokenType, st) = defineEnumProperty lb @@ -82,8 +81,6 @@ defineSemanticProperty (lb, tokenType, st) = semanticDef :: SemanticTokensConfig semanticDef = def - - -- | it produces the following functions: -- semanticConfigProperties :: Properties '[ -- 'PropertyKey "Variable" ('TEnum SemanticTokenTypes), @@ -95,9 +92,8 @@ mkSemanticConfigFunctions = do let pid = mkName "pid" let semanticConfigPropertiesName = mkName "semanticConfigProperties" let useSemanticConfigActionName = mkName "useSemanticConfigAction" - let - allLabelStrs = map ((<> "Token"). lowerFirst) allHsTokenNameStrings - allLabels = map (LabelE . (<> "Token"). lowerFirst) allHsTokenNameStrings + let allLabelStrs = map ((<> "Token") . lowerFirst) allHsTokenNameStrings + allLabels = map (LabelE . (<> "Token") . lowerFirst) allHsTokenNameStrings allFieldsNames = map (mkName . toConfigName) allHsTokenNameStrings allVariableNames = map (mkName . ("_variable_" <>) . toConfigName) allHsTokenNameStrings -- <- useSemanticConfigAction label pid config @@ -112,7 +108,7 @@ mkSemanticConfigFunctions = do -- get and then update record bb = DoE Nothing $ getProperties ++ [NoBindS $ AppE (VarE 'return) recordUpdate] let useSemanticConfigAction = FunD useSemanticConfigActionName [Clause [VarP pid] (NormalB bb) []] - let useSemanticConfigActionSig = SigD useSemanticConfigActionName (ArrowT `AppT ` ConT ''PluginId `AppT` (ConT ''Action `AppT` ConT ''SemanticTokensConfig)) + let useSemanticConfigActionSig = SigD useSemanticConfigActionName (ArrowT `AppT` ConT ''PluginId `AppT` (ConT ''Action `AppT` ConT ''SemanticTokensConfig)) -- SemanticConfigProperties nameAndDescList <- @@ -124,11 +120,16 @@ mkSemanticConfigFunctions = do ) $ zip allLabels allHsTokenTypes let body = foldr (AppE . 
AppE (VarE 'defineSemanticProperty)) (VarE 'emptyProperties) nameAndDescList - let propertiesType = foldr (\la -> - AppT (PromotedConsT `AppT` - (AppT (ConT 'PropertyKey) (LitT (StrTyLit la)) `AppT` AppT (ConT 'TEnum) (ConT ''SemanticTokenTypes)))) - PromotedNilT allLabelStrs + let propertiesType = + foldr + ( \la -> + AppT + ( PromotedConsT + `AppT` (AppT (ConT 'PropertyKey) (LitT (StrTyLit la)) `AppT` AppT (ConT 'TEnum) (ConT ''SemanticTokenTypes)) + ) + ) + PromotedNilT + allLabelStrs let semanticConfigProperties = FunD semanticConfigPropertiesName [Clause [] (NormalB body) []] let semanticConfigPropertiesSig = SigD semanticConfigPropertiesName (AppT (ConT ''Properties) propertiesType) return [semanticConfigPropertiesSig, semanticConfigProperties, useSemanticConfigActionSig, useSemanticConfigAction] - From a6e941ac89d5b22a1f47fda9162991bf571764ea Mon Sep 17 00:00:00 2001 From: Patrick Date: Tue, 23 Jan 2024 22:09:11 +0800 Subject: [PATCH 40/74] cleanup --- .../src/Ide/Plugin/SemanticTokens/Query.hs | 4 +--- 1 file changed, 1 insertion(+), 3 deletions(-) diff --git a/plugins/hls-semantic-tokens-plugin/src/Ide/Plugin/SemanticTokens/Query.hs b/plugins/hls-semantic-tokens-plugin/src/Ide/Plugin/SemanticTokens/Query.hs index 9a26ff0fbc..7ed9e47955 100644 --- a/plugins/hls-semantic-tokens-plugin/src/Ide/Plugin/SemanticTokens/Query.hs +++ b/plugins/hls-semantic-tokens-plugin/src/Ide/Plugin/SemanticTokens/Query.hs @@ -74,9 +74,7 @@ rangeSemanticMapSemanticTokens stc mapping = . M.mapKeys (toCurrentRange mapping) where toAbsSemanticToken :: Range -> HsSemanticTokenType -> SemanticTokenAbsolute - toAbsSemanticToken (Range (Language.LSP.Protocol.Types.Position startLine startColumn) - (Language.LSP.Protocol.Types.Position _endLine endColumn)) - tokenType = + toAbsSemanticToken (Range (Position startLine startColumn) (Position _endLine endColumn)) tokenType = let len = endColumn - startColumn in SemanticTokenAbsolute (fromIntegral startLine) From 622ac850ffeb176a4bbc3a64138da235826ce5f3 Mon Sep 17 00:00:00 2001 From: Patrick Date: Wed, 24 Jan 2024 23:16:57 +0800 Subject: [PATCH 41/74] make ast traversing explicit --- .../src/Ide/Plugin/SemanticTokens/Tokenize.hs | 28 ++++++++----------- 1 file changed, 12 insertions(+), 16 deletions(-) diff --git a/plugins/hls-semantic-tokens-plugin/src/Ide/Plugin/SemanticTokens/Tokenize.hs b/plugins/hls-semantic-tokens-plugin/src/Ide/Plugin/SemanticTokens/Tokenize.hs index 68f7658ef8..427297cc1a 100644 --- a/plugins/hls-semantic-tokens-plugin/src/Ide/Plugin/SemanticTokens/Tokenize.hs +++ b/plugins/hls-semantic-tokens-plugin/src/Ide/Plugin/SemanticTokens/Tokenize.hs @@ -36,7 +36,6 @@ data PTokenState t = PTokenState { rangeIdSetMap :: RangeIdSetMap, rope :: Rope, cursor :: Char.Position, - currentAst :: HieAST t, columnsInUtf16 :: UInt, currentRange :: Range, currentRangeContext :: SplitResult @@ -53,13 +52,12 @@ data SplitResult startRange :: Range startRange = Range (Position 0 0) (Position 0 0) -mkPTokenState :: VirtualFile -> HieAST a -> PTokenState a -mkPTokenState vf ast = +mkPTokenState :: VirtualFile -> PTokenState a +mkPTokenState vf = PTokenState { rangeIdSetMap = mempty, rope = Rope.fromText $ toText vf._file_text, cursor = Char.Position 0 0, - currentAst = ast, columnsInUtf16 = 0, currentRange = startRange, currentRangeContext = NoSplit ("", startRange) @@ -83,22 +81,21 @@ liftMaybeM p = do forM_ (execStateT p st) put hieAstSpanIdentifiers :: VirtualFile -> HieAST a -> RangeIdSetMap -hieAstSpanIdentifiers vf ast = runIdentity $ runTokenizer foldAst 
(mkPTokenState vf ast) +hieAstSpanIdentifiers vf ast = runIdentity $ runTokenizer (foldAst ast) (mkPTokenState vf) -- | foldAst -- visit every leaf node in the ast in depth first order -foldAst :: (Monad m) => Tokenizer m () -foldAst = do - ast <- gets currentAst +foldAst :: (Monad m) => HieAST t -> Tokenizer m () +foldAst ast = do +-- ast <- gets currentAst if null (nodeChildren ast) - then liftMaybeM visitLeafIds + then liftMaybeM (visitLeafIds ast) else do let children = nodeChildren ast - mapM_ (\x -> (modify $ \s -> s {currentAst = x}) >> foldAst) children + mapM_ foldAst children -visitLeafIds :: Tokenizer Maybe () -visitLeafIds = liftMaybeM $ do - leaf <- gets currentAst +visitLeafIds :: HieAST t -> Tokenizer Maybe () +visitLeafIds leaf = liftMaybeM $ do (ran, token) <- focusTokenAt leaf splitResult <- lift $ splitRangeByText token ran modify $ \s -> s {currentRange = ran, currentRangeContext = splitResult} @@ -139,7 +136,7 @@ focusTokenAt leaf = do cur <- gets cursor cs <- gets columnsInUtf16 let span = nodeSpan leaf - (startPos, length) <- lift $ srcSpanMaybePositionLength span + let (startPos, length) = srcSpanMaybePositionLength span let (gap, startRope) = Rope.charSplitAtPosition (startPos `sub` cur) rp (token, remains) <- lift $ charSplitAtMaybe length startRope let tokenText = Rope.toText token @@ -152,9 +149,8 @@ focusTokenAt leaf = do updateCursor $ srcSpanEndCharPosition span return (ran, tokenText) where - srcSpanMaybePositionLength :: (Integral l) => RealSrcSpan -> Maybe (Char.Position, l) + srcSpanMaybePositionLength :: (Integral l) => RealSrcSpan -> (Char.Position, l) srcSpanMaybePositionLength real = - return ( realSrcLocRopePosition $ realSrcSpanStart real, fromIntegral $ (srcLocCol $ realSrcSpanEnd real) - (srcLocCol $ realSrcSpanStart real) ) From b1f3fa03d30e5700e6dd77d1095598ac2f83aade Mon Sep 17 00:00:00 2001 From: Patrick Date: Wed, 24 Jan 2024 23:43:19 +0800 Subject: [PATCH 42/74] cleanup --- .../src/Ide/Plugin/SemanticTokens/Tokenize.hs | 5 +---- 1 file changed, 1 insertion(+), 4 deletions(-) diff --git a/plugins/hls-semantic-tokens-plugin/src/Ide/Plugin/SemanticTokens/Tokenize.hs b/plugins/hls-semantic-tokens-plugin/src/Ide/Plugin/SemanticTokens/Tokenize.hs index 427297cc1a..dfcbcf8676 100644 --- a/plugins/hls-semantic-tokens-plugin/src/Ide/Plugin/SemanticTokens/Tokenize.hs +++ b/plugins/hls-semantic-tokens-plugin/src/Ide/Plugin/SemanticTokens/Tokenize.hs @@ -87,12 +87,9 @@ hieAstSpanIdentifiers vf ast = runIdentity $ runTokenizer (foldAst ast) (mkPToke -- visit every leaf node in the ast in depth first order foldAst :: (Monad m) => HieAST t -> Tokenizer m () foldAst ast = do --- ast <- gets currentAst if null (nodeChildren ast) then liftMaybeM (visitLeafIds ast) - else do - let children = nodeChildren ast - mapM_ foldAst children + else mapM_ foldAst $ nodeChildren ast visitLeafIds :: HieAST t -> Tokenizer Maybe () visitLeafIds leaf = liftMaybeM $ do From 811d0eb924c84bc7a663234a7b66ab36c6d3d448 Mon Sep 17 00:00:00 2001 From: Patrick Date: Thu, 25 Jan 2024 00:28:21 +0800 Subject: [PATCH 43/74] optimize, splitRangeByText should not revert the work of focusTokenAt --- .../src/Ide/Plugin/SemanticTokens/Tokenize.hs | 7 ++++--- 1 file changed, 4 insertions(+), 3 deletions(-) diff --git a/plugins/hls-semantic-tokens-plugin/src/Ide/Plugin/SemanticTokens/Tokenize.hs b/plugins/hls-semantic-tokens-plugin/src/Ide/Plugin/SemanticTokens/Tokenize.hs index dfcbcf8676..83bdc9b7c1 100644 --- a/plugins/hls-semantic-tokens-plugin/src/Ide/Plugin/SemanticTokens/Tokenize.hs +++ 
b/plugins/hls-semantic-tokens-plugin/src/Ide/Plugin/SemanticTokens/Tokenize.hs @@ -94,9 +94,10 @@ foldAst ast = do visitLeafIds :: HieAST t -> Tokenizer Maybe () visitLeafIds leaf = liftMaybeM $ do (ran, token) <- focusTokenAt leaf - splitResult <- lift $ splitRangeByText token ran - modify $ \s -> s {currentRange = ran, currentRangeContext = splitResult} - mapM_ combineNodeIds $ Map.filterWithKey (\k _ -> k == SourceInfo) $ getSourcedNodeInfo $ sourcedNodeInfo leaf + liftMaybeM $ do + splitResult <- lift $ splitRangeByText token ran + modify $ \s -> s {currentRange = ran, currentRangeContext = splitResult} + mapM_ combineNodeIds $ Map.filterWithKey (\k _ -> k == SourceInfo) $ getSourcedNodeInfo $ sourcedNodeInfo leaf where combineNodeIds :: (Monad m) => NodeInfo a -> Tokenizer m () combineNodeIds (NodeInfo _ _ bd) = mapM_ getIdentifier (M.keys bd) From df48d37643351ef7ae21fef2a4d8c721538a20cb Mon Sep 17 00:00:00 2001 From: Patrick Date: Thu, 25 Jan 2024 01:11:00 +0800 Subject: [PATCH 44/74] clean up --- .../src/Ide/Plugin/SemanticTokens/Tokenize.hs | 15 ++++++++++++--- 1 file changed, 12 insertions(+), 3 deletions(-) diff --git a/plugins/hls-semantic-tokens-plugin/src/Ide/Plugin/SemanticTokens/Tokenize.hs b/plugins/hls-semantic-tokens-plugin/src/Ide/Plugin/SemanticTokens/Tokenize.hs index 83bdc9b7c1..ef532aa8fe 100644 --- a/plugins/hls-semantic-tokens-plugin/src/Ide/Plugin/SemanticTokens/Tokenize.hs +++ b/plugins/hls-semantic-tokens-plugin/src/Ide/Plugin/SemanticTokens/Tokenize.hs @@ -19,6 +19,7 @@ import Data.Text (Text) import qualified Data.Text as T import qualified Data.Text.Rope as Char import Data.Text.Utf16.Rope (toText) +import qualified Data.Text.Utf16.Rope as Utf16 import Data.Text.Utf16.Rope.Mixed (Rope) import qualified Data.Text.Utf16.Rope.Mixed as Rope import Development.IDE.GHC.Compat @@ -75,6 +76,9 @@ addRangeIdSetMap r i = modify $ \s -> s {rangeIdSetMap = Map.insertWith (<>) r ( updateColumnsInUtf16 :: (Monad m) => UInt -> Tokenizer m () updateColumnsInUtf16 n = modify $ \s -> s {columnsInUtf16 = n} +-- lift a Tokenizer Maybe () to Tokenizer m (), +-- if the Maybe is Nothing, do nothing, recover the state +-- if the Maybe is Just (), do the action, and keep the state liftMaybeM :: (Monad m) => Tokenizer Maybe () -> Tokenizer m () liftMaybeM p = do st <- get @@ -94,6 +98,8 @@ foldAst ast = do visitLeafIds :: HieAST t -> Tokenizer Maybe () visitLeafIds leaf = liftMaybeM $ do (ran, token) <- focusTokenAt leaf + -- we do want to revert `focusTokenAt` on failure of `splitRangeByText` + -- since the `focusTokenAt` properly update the state liftMaybeM $ do splitResult <- lift $ splitRangeByText token ran modify $ \s -> s {currentRange = ran, currentRangeContext = splitResult} @@ -166,8 +172,8 @@ focusTokenAt leaf = do srcSpanEndCharPosition real = realSrcLocRopePosition $ realSrcSpanEnd real newColumn :: UInt -> Text -> UInt newColumn n rp = case T.breakOnEnd "\n" rp of - ("", nEnd) -> n + fromIntegral (Rope.utf16Length $ Rope.fromText nEnd) - (_, nEnd) -> fromIntegral (Rope.utf16Length $ Rope.fromText nEnd) + ("", nEnd) -> n + utf16Length nEnd + (_, nEnd) -> utf16Length nEnd codePointRangeToRangeWith :: UInt -> UInt -> CodePointRange -> Range codePointRangeToRangeWith newStartCol newEndCol (CodePointRange (CodePointPosition startLine _) (CodePointPosition endLine _)) = Range (Position startLine newStartCol) (Position endLine newEndCol) @@ -188,7 +194,7 @@ splitRangeByText tk ran = do Just ('`', xs) -> (subOneRange ran, T.takeWhile (/= '`') xs) _ -> (ran, tk) let (prefix, 
tk'') = T.breakOnEnd "." tk' - splitRange tk'' (fromIntegral $ Rope.utf16Length $ Rope.fromText prefix) ran' + splitRange tk'' (utf16Length prefix) ran' where splitRange :: Text -> UInt -> Range -> Maybe SplitResult splitRange tx n r@(Range (Position l1 c1) (Position l2 c2)) @@ -197,3 +203,6 @@ splitRangeByText tk ran = do | otherwise = Nothing subOneRange :: Range -> Range subOneRange (Range (Position l1 c1) (Position l2 c2)) = Range (Position l1 (c1 + 1)) (Position l2 (c2 - 1)) + +utf16Length :: Integral i => Text -> i +utf16Length = fromIntegral . Utf16.length . Utf16.fromText From 6fdff822ef41a68128bed1033d39b6c50a780c0d Mon Sep 17 00:00:00 2001 From: Patrick Date: Thu, 25 Jan 2024 01:12:49 +0800 Subject: [PATCH 45/74] fix doc --- .../src/Ide/Plugin/SemanticTokens/Tokenize.hs | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/plugins/hls-semantic-tokens-plugin/src/Ide/Plugin/SemanticTokens/Tokenize.hs b/plugins/hls-semantic-tokens-plugin/src/Ide/Plugin/SemanticTokens/Tokenize.hs index ef532aa8fe..e2f21d8031 100644 --- a/plugins/hls-semantic-tokens-plugin/src/Ide/Plugin/SemanticTokens/Tokenize.hs +++ b/plugins/hls-semantic-tokens-plugin/src/Ide/Plugin/SemanticTokens/Tokenize.hs @@ -98,7 +98,7 @@ foldAst ast = do visitLeafIds :: HieAST t -> Tokenizer Maybe () visitLeafIds leaf = liftMaybeM $ do (ran, token) <- focusTokenAt leaf - -- we do want to revert `focusTokenAt` on failure of `splitRangeByText` + -- we do not want to revert `focusTokenAt` on failure of `splitRangeByText` -- since the `focusTokenAt` properly update the state liftMaybeM $ do splitResult <- lift $ splitRangeByText token ran From f76829e7ccb569bfbc5147aad53dcd66f7764453 Mon Sep 17 00:00:00 2001 From: Patrick Date: Thu, 25 Jan 2024 01:23:24 +0800 Subject: [PATCH 46/74] rename Name to Id to fit the change --- .../src/Ide/Plugin/SemanticTokens/Internal.hs | 6 +-- .../src/Ide/Plugin/SemanticTokens/Query.hs | 23 +++++----- .../src/Ide/Plugin/SemanticTokens/Tokenize.hs | 46 +++++++++---------- .../src/Ide/Plugin/SemanticTokens/Types.hs | 5 +- 4 files changed, 42 insertions(+), 38 deletions(-) diff --git a/plugins/hls-semantic-tokens-plugin/src/Ide/Plugin/SemanticTokens/Internal.hs b/plugins/hls-semantic-tokens-plugin/src/Ide/Plugin/SemanticTokens/Internal.hs index c539ed9dc7..518b37e90d 100644 --- a/plugins/hls-semantic-tokens-plugin/src/Ide/Plugin/SemanticTokens/Internal.hs +++ b/plugins/hls-semantic-tokens-plugin/src/Ide/Plugin/SemanticTokens/Internal.hs @@ -108,10 +108,10 @@ getSemanticTokensRule recorder = -- get current location from the old ones let spanIdMap = M.filter (not . 
null) $ hieAstSpanIdentifiers virtualFile ast let names = S.toList $ S.unions $ Map.elems spanIdMap - let localSemanticMap = mkLocalNameSemanticFromAst names (hieKindFunMasksKind hieKind) refMap + let localSemanticMap = mkLocalIdSemanticFromAst names (hieKindFunMasksKind hieKind) refMap -- get imported name semantic map - let importedNameSemanticMap = foldr (getTypeExclude localSemanticMap getTyThingMap) mempty names - let sMap = M.unionWith (<>) importedNameSemanticMap localSemanticMap + let importedIdSemanticMap = foldr (getTypeExclude localSemanticMap getTyThingMap) mempty names + let sMap = M.unionWith (<>) importedIdSemanticMap localSemanticMap let rangeTokenType = extractSemanticTokensFromNames sMap spanIdMap return $ RangeHsSemanticTokenTypes rangeTokenType where diff --git a/plugins/hls-semantic-tokens-plugin/src/Ide/Plugin/SemanticTokens/Query.hs b/plugins/hls-semantic-tokens-plugin/src/Ide/Plugin/SemanticTokens/Query.hs index 7ed9e47955..b97c797457 100644 --- a/plugins/hls-semantic-tokens-plugin/src/Ide/Plugin/SemanticTokens/Query.hs +++ b/plugins/hls-semantic-tokens-plugin/src/Ide/Plugin/SemanticTokens/Query.hs @@ -21,7 +21,8 @@ import Development.IDE.GHC.Compat import Ide.Plugin.SemanticTokens.Mappings import Ide.Plugin.SemanticTokens.Types (HieFunMaskKind, HsSemanticTokenType (TModule), - NameSemanticMap, + IdSemanticMap, + RangeIdSetMap, SemanticTokensConfig) import Language.LSP.Protocol.Types (Position (Position), Range (Range), @@ -37,17 +38,17 @@ import Prelude hiding (length, span) --------------------------------------------------------- -mkLocalNameSemanticFromAst :: [Identifier] -> HieFunMaskKind a -> RefMap a -> NameSemanticMap -mkLocalNameSemanticFromAst names hieKind rm = M.fromList (mapMaybe (nameNameSemanticFromHie hieKind rm) names) +mkLocalIdSemanticFromAst :: [Identifier] -> HieFunMaskKind a -> RefMap a -> IdSemanticMap +mkLocalIdSemanticFromAst names hieKind rm = M.fromList (mapMaybe (idIdSemanticFromHie hieKind rm) names) -nameNameSemanticFromHie :: forall a. HieFunMaskKind a -> RefMap a -> Identifier -> Maybe (Identifier, HsSemanticTokenType) -nameNameSemanticFromHie _ _ ns@(Left _) = Just (ns, TModule) -nameNameSemanticFromHie hieKind rm ns@(Right _) = do - st <- nameSemanticFromRefMap rm ns +idIdSemanticFromHie :: forall a. HieFunMaskKind a -> RefMap a -> Identifier -> Maybe (Identifier, HsSemanticTokenType) +idIdSemanticFromHie _ _ ns@(Left _) = Just (ns, TModule) +idIdSemanticFromHie hieKind rm ns@(Right _) = do + st <- idSemanticFromRefMap rm ns return (ns, st) where - nameSemanticFromRefMap :: RefMap a -> Identifier -> Maybe HsSemanticTokenType - nameSemanticFromRefMap rm' name' = do + idSemanticFromRefMap :: RefMap a -> Identifier -> Maybe HsSemanticTokenType + idSemanticFromRefMap rm' name' = do spanInfos <- Map.lookup name' rm' let typeTokenType = foldMap (typeSemantic hieKind) $ listToMaybe $ mapMaybe (identType . snd) spanInfos contextInfoTokenType <- foldMap (contextInfosMaybeTokenType . identInfo . 
snd) spanInfos @@ -59,11 +60,11 @@ nameNameSemanticFromHie hieKind rm ns@(Right _) = do ------------------------------------------------- --- * extract semantic tokens from NameSemanticMap +-- * extract semantic tokens from IdSemanticMap ------------------------------------------------- -extractSemanticTokensFromNames :: NameSemanticMap -> M.Map Range (Set Identifier) -> M.Map Range HsSemanticTokenType +extractSemanticTokensFromNames :: IdSemanticMap -> RangeIdSetMap -> M.Map Range HsSemanticTokenType extractSemanticTokensFromNames nsm = Map.mapMaybe (foldMap (`M.lookup` nsm)) rangeSemanticMapSemanticTokens :: SemanticTokensConfig -> PositionMapping -> M.Map Range HsSemanticTokenType -> Either Text SemanticTokens diff --git a/plugins/hls-semantic-tokens-plugin/src/Ide/Plugin/SemanticTokens/Tokenize.hs b/plugins/hls-semantic-tokens-plugin/src/Ide/Plugin/SemanticTokens/Tokenize.hs index e2f21d8031..2ed34a8309 100644 --- a/plugins/hls-semantic-tokens-plugin/src/Ide/Plugin/SemanticTokens/Tokenize.hs +++ b/plugins/hls-semantic-tokens-plugin/src/Ide/Plugin/SemanticTokens/Tokenize.hs @@ -5,33 +5,33 @@ module Ide.Plugin.SemanticTokens.Tokenize (hieAstSpanIdentifiers) where -import Control.Lens (Identity (runIdentity)) -import Control.Monad (forM_, guard) -import Control.Monad.State (MonadState (get), - MonadTrans (lift), execStateT, - gets, modify, put) -import Control.Monad.Trans.State (StateT) -import qualified Data.Map as M -import qualified Data.Map as Map -import Data.Set (Set) -import qualified Data.Set as S -import Data.Text (Text) -import qualified Data.Text as T -import qualified Data.Text.Rope as Char -import Data.Text.Utf16.Rope (toText) -import qualified Data.Text.Utf16.Rope as Utf16 -import Data.Text.Utf16.Rope.Mixed (Rope) -import qualified Data.Text.Utf16.Rope.Mixed as Rope +import Control.Lens (Identity (runIdentity)) +import Control.Monad (forM_, guard) +import Control.Monad.State (MonadState (get), + MonadTrans (lift), execStateT, + gets, modify, put) +import Control.Monad.Trans.State (StateT) +import qualified Data.Map as M +import qualified Data.Map as Map +import Data.Set (Set) +import qualified Data.Set as S +import Data.Text (Text) +import qualified Data.Text as T +import qualified Data.Text.Rope as Char +import Data.Text.Utf16.Rope (toText) +import qualified Data.Text.Utf16.Rope as Utf16 +import Data.Text.Utf16.Rope.Mixed (Rope) +import qualified Data.Text.Utf16.Rope.Mixed as Rope import Development.IDE.GHC.Compat -import Development.IDE.GHC.Error (realSrcSpanToCodePointRange) -import Language.LSP.Protocol.Types (Position (Position), - Range (Range), UInt) -import Language.LSP.VFS hiding (line) -import Prelude hiding (length, span) +import Development.IDE.GHC.Error (realSrcSpanToCodePointRange) +import Ide.Plugin.SemanticTokens.Types (RangeIdSetMap) +import Language.LSP.Protocol.Types (Position (Position), + Range (Range), UInt) +import Language.LSP.VFS hiding (line) +import Prelude hiding (length, span) type Tokenizer m a = forall t. 
StateT (PTokenState t) m a -type RangeIdSetMap = Map.Map Range (Set Identifier) data PTokenState t = PTokenState { rangeIdSetMap :: RangeIdSetMap, diff --git a/plugins/hls-semantic-tokens-plugin/src/Ide/Plugin/SemanticTokens/Types.hs b/plugins/hls-semantic-tokens-plugin/src/Ide/Plugin/SemanticTokens/Types.hs index f1e8807de8..8871c0dc5d 100644 --- a/plugins/hls-semantic-tokens-plugin/src/Ide/Plugin/SemanticTokens/Types.hs +++ b/plugins/hls-semantic-tokens-plugin/src/Ide/Plugin/SemanticTokens/Types.hs @@ -26,6 +26,7 @@ import GHC.Generics (Generic) import Language.LSP.Protocol.Types -- import template haskell import Data.Map (Map) +import Data.Set (Set) import Language.Haskell.TH.Syntax (Lift) @@ -112,7 +113,9 @@ data Loc = Loc instance Show Loc where show (Loc line startChar len) = show line <> ":" <> show startChar <> "-" <> show (startChar + len) -type NameSemanticMap = Map Identifier HsSemanticTokenType +type RangeIdSetMap = Map Range (Set Identifier) + +type IdSemanticMap = Map Identifier HsSemanticTokenType data GetSemanticTokens = GetSemanticTokens deriving (Eq, Show, Typeable, Generic) From c65d1514a7ed3808f686309f11bc6811917b6b91 Mon Sep 17 00:00:00 2001 From: Patrick Date: Thu, 25 Jan 2024 01:28:59 +0800 Subject: [PATCH 47/74] fix doc --- .../src/Ide/Plugin/SemanticTokens/Tokenize.hs | 4 ---- 1 file changed, 4 deletions(-) diff --git a/plugins/hls-semantic-tokens-plugin/src/Ide/Plugin/SemanticTokens/Tokenize.hs b/plugins/hls-semantic-tokens-plugin/src/Ide/Plugin/SemanticTokens/Tokenize.hs index 2ed34a8309..eb38d831ae 100644 --- a/plugins/hls-semantic-tokens-plugin/src/Ide/Plugin/SemanticTokens/Tokenize.hs +++ b/plugins/hls-semantic-tokens-plugin/src/Ide/Plugin/SemanticTokens/Tokenize.hs @@ -183,10 +183,6 @@ focusTokenAt leaf = do -- for `ModuleA.b`, break it into `ModuleA.` and `b` -- for `(b)`, strip `()`, and get `b` -- for `(ModuleA.b)`, strip `()` and break it into `ModuleA.` and `b` --- nameLength get the length of the `b` in code points unit --- while Range might not be in code points unit. 
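An aside on the splitRangeByText documentation being trimmed here: the qualified-name case relies on Data.Text.breakOnEnd, which splits at the last occurrence of the separator and keeps it in the prefix, and the parenthesis/backtick case strips the wrapper before splitting. A standalone sketch of the three documented inputs (the helper below is illustrative only, not the plugin's code):

{-# LANGUAGE OverloadedStrings #-}
import qualified Data.Text as T

-- Drop a leading '(' or '`' wrapper, keeping only the wrapped identifier,
-- in the same spirit as the T.uncons match in splitRangeByText.
stripWrapper :: T.Text -> T.Text
stripWrapper t = case T.uncons t of
  Just ('(', rest) -> T.takeWhile (/= ')') rest
  Just ('`', rest) -> T.takeWhile (/= '`') rest
  _                -> t

-- >>> T.breakOnEnd "." (stripWrapper "ModuleA.b")
-- ("ModuleA.","b")
-- >>> T.breakOnEnd "." (stripWrapper "(ModuleA.b)")
-- ("ModuleA.","b")
-- >>> T.breakOnEnd "." (stripWrapper "b")
-- ("","b")

An empty prefix corresponds to the NoSplit result, while a non-empty module prefix produces a Split into the `ModuleA.` part and the `b` part.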
--- but the comparison is still valid since we only want to know if it is potentially a qualified identifier --- or an identifier that is wrapped in () or `` splitRangeByText :: Text -> Range -> Maybe SplitResult splitRangeByText tk ran = do let (ran', tk') = case T.uncons tk of From a0a1fb62988b9c7180bf3f322bb3c8955f9ddde3 Mon Sep 17 00:00:00 2001 From: Patrick Date: Thu, 25 Jan 2024 01:37:08 +0800 Subject: [PATCH 48/74] clean up --- .../src/Ide/Plugin/SemanticTokens/Tokenize.hs | 14 +++++++------- 1 file changed, 7 insertions(+), 7 deletions(-) diff --git a/plugins/hls-semantic-tokens-plugin/src/Ide/Plugin/SemanticTokens/Tokenize.hs b/plugins/hls-semantic-tokens-plugin/src/Ide/Plugin/SemanticTokens/Tokenize.hs index eb38d831ae..907a806688 100644 --- a/plugins/hls-semantic-tokens-plugin/src/Ide/Plugin/SemanticTokens/Tokenize.hs +++ b/plugins/hls-semantic-tokens-plugin/src/Ide/Plugin/SemanticTokens/Tokenize.hs @@ -5,6 +5,7 @@ module Ide.Plugin.SemanticTokens.Tokenize (hieAstSpanIdentifiers) where +import Control.Arrow (first) import Control.Lens (Identity (runIdentity)) import Control.Monad (forM_, guard) import Control.Monad.State (MonadState (get), @@ -141,28 +142,27 @@ focusTokenAt leaf = do cs <- gets columnsInUtf16 let span = nodeSpan leaf let (startPos, length) = srcSpanMaybePositionLength span - let (gap, startRope) = Rope.charSplitAtPosition (startPos `sub` cur) rp + let (gap, startRope) = first Rope.toText $ Rope.charSplitAtPosition (startPos `sub` cur) rp (token, remains) <- lift $ charSplitAtMaybe length startRope - let tokenText = Rope.toText token - let ncs = newColumn cs $ Rope.toText gap - let nce = newColumn ncs tokenText + let ncs = newColumn cs gap + let nce = newColumn ncs token -- compute the new range for utf16 let ran = codePointRangeToRangeWith ncs nce $ realSrcSpanToCodePointRange span updateColumnsInUtf16 nce updateRope remains updateCursor $ srcSpanEndCharPosition span - return (ran, tokenText) + return (ran, token) where srcSpanMaybePositionLength :: (Integral l) => RealSrcSpan -> (Char.Position, l) srcSpanMaybePositionLength real = ( realSrcLocRopePosition $ realSrcSpanStart real, fromIntegral $ (srcLocCol $ realSrcSpanEnd real) - (srcLocCol $ realSrcSpanStart real) ) - charSplitAtMaybe :: Word -> Rope -> Maybe (Rope, Rope) + charSplitAtMaybe :: Word -> Rope -> Maybe (Text, Rope) charSplitAtMaybe len rpe = do let (prefix, suffix) = Rope.charSplitAt len rpe guard $ Rope.charLength prefix == len - return (prefix, suffix) + return (Rope.toText prefix, suffix) sub :: Char.Position -> Char.Position -> Char.Position sub (Char.Position l1 c1) (Char.Position l2 c2) = if l1 == l2 then Char.Position 0 (c1 - c2) else Char.Position (l1 - l2) c1 From 43b70b93e76faf850ae70c2bb8c37ac6ccdbe93e Mon Sep 17 00:00:00 2001 From: Patrick Date: Thu, 25 Jan 2024 01:43:32 +0800 Subject: [PATCH 49/74] clean up --- .../src/Ide/Plugin/SemanticTokens/Tokenize.hs | 4 +--- 1 file changed, 1 insertion(+), 3 deletions(-) diff --git a/plugins/hls-semantic-tokens-plugin/src/Ide/Plugin/SemanticTokens/Tokenize.hs b/plugins/hls-semantic-tokens-plugin/src/Ide/Plugin/SemanticTokens/Tokenize.hs index 907a806688..8f3650195c 100644 --- a/plugins/hls-semantic-tokens-plugin/src/Ide/Plugin/SemanticTokens/Tokenize.hs +++ b/plugins/hls-semantic-tokens-plugin/src/Ide/Plugin/SemanticTokens/Tokenize.hs @@ -150,7 +150,7 @@ focusTokenAt leaf = do let ran = codePointRangeToRangeWith ncs nce $ realSrcSpanToCodePointRange span updateColumnsInUtf16 nce updateRope remains - updateCursor $ srcSpanEndCharPosition span + 
updateCursor $ realSrcLocRopePosition $ realSrcSpanEnd span return (ran, token) where srcSpanMaybePositionLength :: (Integral l) => RealSrcSpan -> (Char.Position, l) @@ -168,8 +168,6 @@ focusTokenAt leaf = do if l1 == l2 then Char.Position 0 (c1 - c2) else Char.Position (l1 - l2) c1 realSrcLocRopePosition :: RealSrcLoc -> Char.Position realSrcLocRopePosition real = Char.Position (fromIntegral $ srcLocLine real - 1) (fromIntegral $ srcLocCol real - 1) - srcSpanEndCharPosition :: RealSrcSpan -> Char.Position - srcSpanEndCharPosition real = realSrcLocRopePosition $ realSrcSpanEnd real newColumn :: UInt -> Text -> UInt newColumn n rp = case T.breakOnEnd "\n" rp of ("", nEnd) -> n + utf16Length nEnd From d77788e272cfd8536e1fb3b3e2251cd2e451d741 Mon Sep 17 00:00:00 2001 From: Patrick Date: Thu, 25 Jan 2024 02:14:32 +0800 Subject: [PATCH 50/74] clean up --- .../src/Ide/Plugin/SemanticTokens/Tokenize.hs | 34 ++++++------------- 1 file changed, 10 insertions(+), 24 deletions(-) diff --git a/plugins/hls-semantic-tokens-plugin/src/Ide/Plugin/SemanticTokens/Tokenize.hs b/plugins/hls-semantic-tokens-plugin/src/Ide/Plugin/SemanticTokens/Tokenize.hs index 8f3650195c..d5a9b4e7b6 100644 --- a/plugins/hls-semantic-tokens-plugin/src/Ide/Plugin/SemanticTokens/Tokenize.hs +++ b/plugins/hls-semantic-tokens-plugin/src/Ide/Plugin/SemanticTokens/Tokenize.hs @@ -1,4 +1,5 @@ -{-# LANGUAGE DeriveFunctor #-} + +{-# LANGUAGE NamedFieldPuns #-} {-# LANGUAGE OverloadedRecordDot #-} {-# LANGUAGE OverloadedStrings #-} {-# LANGUAGE RankNTypes #-} @@ -14,7 +15,6 @@ import Control.Monad.State (MonadState (get), import Control.Monad.Trans.State (StateT) import qualified Data.Map as M import qualified Data.Map as Map -import Data.Set (Set) import qualified Data.Set as S import Data.Text (Text) import qualified Data.Text as T @@ -65,18 +65,9 @@ mkPTokenState vf = currentRangeContext = NoSplit ("", startRange) } -updateCursor :: (Monad m) => Char.Position -> Tokenizer m () -updateCursor pos = modify $ \s -> s {cursor = pos} - -updateRope :: (Monad m) => Rope -> Tokenizer m () -updateRope r = modify $ \s -> s {rope = r} - addRangeIdSetMap :: (Monad m) => Range -> Identifier -> Tokenizer m () addRangeIdSetMap r i = modify $ \s -> s {rangeIdSetMap = Map.insertWith (<>) r (S.singleton i) $ rangeIdSetMap s} -updateColumnsInUtf16 :: (Monad m) => UInt -> Tokenizer m () -updateColumnsInUtf16 n = modify $ \s -> s {columnsInUtf16 = n} - -- lift a Tokenizer Maybe () to Tokenizer m (), -- if the Maybe is Nothing, do nothing, recover the state -- if the Maybe is Just (), do the action, and keep the state @@ -91,10 +82,9 @@ hieAstSpanIdentifiers vf ast = runIdentity $ runTokenizer (foldAst ast) (mkPToke -- | foldAst -- visit every leaf node in the ast in depth first order foldAst :: (Monad m) => HieAST t -> Tokenizer m () -foldAst ast = do - if null (nodeChildren ast) - then liftMaybeM (visitLeafIds ast) - else mapM_ foldAst $ nodeChildren ast +foldAst ast = if null (nodeChildren ast) + then liftMaybeM (visitLeafIds ast) + else mapM_ foldAst $ nodeChildren ast visitLeafIds :: HieAST t -> Tokenizer Maybe () visitLeafIds leaf = liftMaybeM $ do @@ -137,26 +127,22 @@ focusTokenAt :: -- | (token, remains) Tokenizer Maybe (Range, Text) focusTokenAt leaf = do - rp <- gets rope - cur <- gets cursor - cs <- gets columnsInUtf16 + PTokenState{cursor, rope, columnsInUtf16} <- get let span = nodeSpan leaf let (startPos, length) = srcSpanMaybePositionLength span - let (gap, startRope) = first Rope.toText $ Rope.charSplitAtPosition (startPos `sub` cur) rp + 
let (gap, startRope) = first Rope.toText $ Rope.charSplitAtPosition (startPos `sub` cursor) rope (token, remains) <- lift $ charSplitAtMaybe length startRope - let ncs = newColumn cs gap + let ncs = newColumn columnsInUtf16 gap let nce = newColumn ncs token -- compute the new range for utf16 let ran = codePointRangeToRangeWith ncs nce $ realSrcSpanToCodePointRange span - updateColumnsInUtf16 nce - updateRope remains - updateCursor $ realSrcLocRopePosition $ realSrcSpanEnd span + modify $ \s -> s {columnsInUtf16 = nce, rope = remains, cursor = realSrcLocRopePosition (realSrcSpanEnd span)} return (ran, token) where srcSpanMaybePositionLength :: (Integral l) => RealSrcSpan -> (Char.Position, l) srcSpanMaybePositionLength real = ( realSrcLocRopePosition $ realSrcSpanStart real, - fromIntegral $ (srcLocCol $ realSrcSpanEnd real) - (srcLocCol $ realSrcSpanStart real) + fromIntegral $ srcLocCol (realSrcSpanEnd real) - srcLocCol (realSrcSpanStart real) ) charSplitAtMaybe :: Word -> Rope -> Maybe (Text, Rope) charSplitAtMaybe len rpe = do From 863ef268db03eae397929236b724f78dff1aab05 Mon Sep 17 00:00:00 2001 From: Patrick Date: Thu, 25 Jan 2024 03:04:44 +0800 Subject: [PATCH 51/74] only handle the leaf node with single column token --- .../src/Ide/Plugin/SemanticTokens/Tokenize.hs | 12 +++++++----- 1 file changed, 7 insertions(+), 5 deletions(-) diff --git a/plugins/hls-semantic-tokens-plugin/src/Ide/Plugin/SemanticTokens/Tokenize.hs b/plugins/hls-semantic-tokens-plugin/src/Ide/Plugin/SemanticTokens/Tokenize.hs index d5a9b4e7b6..ac4f6c5fc7 100644 --- a/plugins/hls-semantic-tokens-plugin/src/Ide/Plugin/SemanticTokens/Tokenize.hs +++ b/plugins/hls-semantic-tokens-plugin/src/Ide/Plugin/SemanticTokens/Tokenize.hs @@ -91,10 +91,10 @@ visitLeafIds leaf = liftMaybeM $ do (ran, token) <- focusTokenAt leaf -- we do not want to revert `focusTokenAt` on failure of `splitRangeByText` -- since the `focusTokenAt` properly update the state - liftMaybeM $ do - splitResult <- lift $ splitRangeByText token ran - modify $ \s -> s {currentRange = ran, currentRangeContext = splitResult} - mapM_ combineNodeIds $ Map.filterWithKey (\k _ -> k == SourceInfo) $ getSourcedNodeInfo $ sourcedNodeInfo leaf + -- todo + splitResult <- lift $ splitRangeByText token ran + modify $ \s -> s {currentRange = ran, currentRangeContext = splitResult} + mapM_ combineNodeIds $ Map.filterWithKey (\k _ -> k == SourceInfo) $ getSourcedNodeInfo $ sourcedNodeInfo leaf where combineNodeIds :: (Monad m) => NodeInfo a -> Tokenizer m () combineNodeIds (NodeInfo _ _ bd) = mapM_ getIdentifier (M.keys bd) @@ -129,6 +129,8 @@ focusTokenAt :: focusTokenAt leaf = do PTokenState{cursor, rope, columnsInUtf16} <- get let span = nodeSpan leaf + -- only handle the leaf node with single column token + guard $ srcSpanStartCol span /= srcSpanEndCol span let (startPos, length) = srcSpanMaybePositionLength span let (gap, startRope) = first Rope.toText $ Rope.charSplitAtPosition (startPos `sub` cursor) rope (token, remains) <- lift $ charSplitAtMaybe length startRope @@ -142,7 +144,7 @@ focusTokenAt leaf = do srcSpanMaybePositionLength :: (Integral l) => RealSrcSpan -> (Char.Position, l) srcSpanMaybePositionLength real = ( realSrcLocRopePosition $ realSrcSpanStart real, - fromIntegral $ srcLocCol (realSrcSpanEnd real) - srcLocCol (realSrcSpanStart real) + fromIntegral $ srcSpanEndCol real - srcSpanStartCol real ) charSplitAtMaybe :: Word -> Rope -> Maybe (Text, Rope) charSplitAtMaybe len rpe = do From e0a3ba70e369dd23fdae3203849761f091ac739a Mon Sep 17 00:00:00 2001 
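A note on why the guards added around visitLeafIds and focusTokenAt are safe to fail: each leaf is processed through liftMaybeM, which runs the possibly-failing step against a snapshot of the state and only commits the result when the step succeeds, so a failed guard makes the step a no-op for that leaf and the tokenizer falls back to the saved state. A standalone sketch of that recovery pattern over a dummy Int state (names and demo values are illustrative, not the plugin's):

import Control.Monad (forM_, guard)
import Control.Monad.Trans.State (StateT, execStateT, get, modify, put)
import Data.Functor.Identity (runIdentity)

-- Run a step that may fail; on Nothing the previous state is restored,
-- on success the state produced by the step is kept, mirroring liftMaybeM.
recoverOnFailure :: Monad m => StateT Int Maybe () -> StateT Int m ()
recoverOnFailure step = do
  st <- get
  forM_ (execStateT step st) put

demo :: Int
demo = runIdentity $ execStateT (recoverOnFailure failing >> recoverOnFailure succeeding) 0
  where
    failing    = modify (+ 100) >> guard False -- fails: the +100 is discarded
    succeeding = modify (+ 1)                  -- succeeds: the +1 is kept
-- demo evaluates to 1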
From: Patrick Date: Thu, 25 Jan 2024 03:07:25 +0800 Subject: [PATCH 52/74] cleanup --- .../src/Ide/Plugin/SemanticTokens/Tokenize.hs | 8 +++----- 1 file changed, 3 insertions(+), 5 deletions(-) diff --git a/plugins/hls-semantic-tokens-plugin/src/Ide/Plugin/SemanticTokens/Tokenize.hs b/plugins/hls-semantic-tokens-plugin/src/Ide/Plugin/SemanticTokens/Tokenize.hs index ac4f6c5fc7..567673846a 100644 --- a/plugins/hls-semantic-tokens-plugin/src/Ide/Plugin/SemanticTokens/Tokenize.hs +++ b/plugins/hls-semantic-tokens-plugin/src/Ide/Plugin/SemanticTokens/Tokenize.hs @@ -88,10 +88,10 @@ foldAst ast = if null (nodeChildren ast) visitLeafIds :: HieAST t -> Tokenizer Maybe () visitLeafIds leaf = liftMaybeM $ do + let span = nodeSpan leaf + -- only handle the leaf node with single column token + guard $ srcSpanStartCol span /= srcSpanEndCol span (ran, token) <- focusTokenAt leaf - -- we do not want to revert `focusTokenAt` on failure of `splitRangeByText` - -- since the `focusTokenAt` properly update the state - -- todo splitResult <- lift $ splitRangeByText token ran modify $ \s -> s {currentRange = ran, currentRangeContext = splitResult} mapM_ combineNodeIds $ Map.filterWithKey (\k _ -> k == SourceInfo) $ getSourcedNodeInfo $ sourcedNodeInfo leaf @@ -129,8 +129,6 @@ focusTokenAt :: focusTokenAt leaf = do PTokenState{cursor, rope, columnsInUtf16} <- get let span = nodeSpan leaf - -- only handle the leaf node with single column token - guard $ srcSpanStartCol span /= srcSpanEndCol span let (startPos, length) = srcSpanMaybePositionLength span let (gap, startRope) = first Rope.toText $ Rope.charSplitAtPosition (startPos `sub` cursor) rope (token, remains) <- lift $ charSplitAtMaybe length startRope From f6ad96736656c92b55a549a77f977afbe8e623a4 Mon Sep 17 00:00:00 2001 From: Patrick Date: Thu, 25 Jan 2024 03:08:10 +0800 Subject: [PATCH 53/74] fix --- .../src/Ide/Plugin/SemanticTokens/Tokenize.hs | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/plugins/hls-semantic-tokens-plugin/src/Ide/Plugin/SemanticTokens/Tokenize.hs b/plugins/hls-semantic-tokens-plugin/src/Ide/Plugin/SemanticTokens/Tokenize.hs index 567673846a..cb23d5f658 100644 --- a/plugins/hls-semantic-tokens-plugin/src/Ide/Plugin/SemanticTokens/Tokenize.hs +++ b/plugins/hls-semantic-tokens-plugin/src/Ide/Plugin/SemanticTokens/Tokenize.hs @@ -90,7 +90,7 @@ visitLeafIds :: HieAST t -> Tokenizer Maybe () visitLeafIds leaf = liftMaybeM $ do let span = nodeSpan leaf -- only handle the leaf node with single column token - guard $ srcSpanStartCol span /= srcSpanEndCol span + guard $ srcSpanStartLine span == srcSpanEndLine span (ran, token) <- focusTokenAt leaf splitResult <- lift $ splitRangeByText token ran modify $ \s -> s {currentRange = ran, currentRangeContext = splitResult} From 7bd75146994dd6313bfef46a0a5104f6b9459056 Mon Sep 17 00:00:00 2001 From: Patrick Date: Fri, 26 Jan 2024 16:59:24 +0800 Subject: [PATCH 54/74] clean up --- .../src/Ide/Plugin/SemanticTokens/Tokenize.hs | 69 +++++++++++-------- .../hls-semantic-tokens-plugin/test/Main.hs | 3 +- .../test/testdata/TDoc.expected | 5 ++ .../test/testdata/TDoc.hs | 9 +++ 4 files changed, 58 insertions(+), 28 deletions(-) create mode 100644 plugins/hls-semantic-tokens-plugin/test/testdata/TDoc.expected create mode 100644 plugins/hls-semantic-tokens-plugin/test/testdata/TDoc.hs diff --git a/plugins/hls-semantic-tokens-plugin/src/Ide/Plugin/SemanticTokens/Tokenize.hs b/plugins/hls-semantic-tokens-plugin/src/Ide/Plugin/SemanticTokens/Tokenize.hs index cb23d5f658..cf3105a8d1 100644 --- 
a/plugins/hls-semantic-tokens-plugin/src/Ide/Plugin/SemanticTokens/Tokenize.hs +++ b/plugins/hls-semantic-tokens-plugin/src/Ide/Plugin/SemanticTokens/Tokenize.hs @@ -6,7 +6,6 @@ module Ide.Plugin.SemanticTokens.Tokenize (hieAstSpanIdentifiers) where -import Control.Arrow (first) import Control.Lens (Identity (runIdentity)) import Control.Monad (forM_, guard) import Control.Monad.State (MonadState (get), @@ -89,12 +88,15 @@ foldAst ast = if null (nodeChildren ast) visitLeafIds :: HieAST t -> Tokenizer Maybe () visitLeafIds leaf = liftMaybeM $ do let span = nodeSpan leaf - -- only handle the leaf node with single column token - guard $ srcSpanStartLine span == srcSpanEndLine span (ran, token) <- focusTokenAt leaf - splitResult <- lift $ splitRangeByText token ran - modify $ \s -> s {currentRange = ran, currentRangeContext = splitResult} - mapM_ combineNodeIds $ Map.filterWithKey (\k _ -> k == SourceInfo) $ getSourcedNodeInfo $ sourcedNodeInfo leaf + -- if `focusTokenAt` succeed, we can safely assume we have shift the cursor correctly + -- we do not need to recover the cursor state, even if the following computation failed + liftMaybeM $ do + -- only handle the leaf node with single column token + guard $ srcSpanStartLine span == srcSpanEndLine span + splitResult <- lift $ splitRangeByText token ran + modify $ \s -> s {currentRange = ran, currentRangeContext = splitResult} + mapM_ combineNodeIds $ Map.filterWithKey (\k _ -> k == SourceInfo) $ getSourcedNodeInfo $ sourcedNodeInfo leaf where combineNodeIds :: (Monad m) => NodeInfo a -> Tokenizer m () combineNodeIds (NodeInfo _ _ bd) = mapM_ getIdentifier (M.keys bd) @@ -129,31 +131,40 @@ focusTokenAt :: focusTokenAt leaf = do PTokenState{cursor, rope, columnsInUtf16} <- get let span = nodeSpan leaf - let (startPos, length) = srcSpanMaybePositionLength span - let (gap, startRope) = first Rope.toText $ Rope.charSplitAtPosition (startPos `sub` cursor) rope - (token, remains) <- lift $ charSplitAtMaybe length startRope + -- traceShowM ("focusTokenAt", span) + let (startPos, endPos) = srcSpanCharPositions span + startOff <- lift $ startPos `sub` cursor + tokenOff <- lift $ endPos `sub` startPos + (gap, startRope) <- lift $ charSplitAtPositionMaybe startOff rope + (token, remains) <- lift $ charSplitAtPositionMaybe tokenOff startRope let ncs = newColumn columnsInUtf16 gap let nce = newColumn ncs token - -- compute the new range for utf16 + -- compute the new range for utf16, tuning the columns is enough let ran = codePointRangeToRangeWith ncs nce $ realSrcSpanToCodePointRange span - modify $ \s -> s {columnsInUtf16 = nce, rope = remains, cursor = realSrcLocRopePosition (realSrcSpanEnd span)} + modify $ \s -> s {columnsInUtf16 = nce, rope = remains, cursor = endPos} return (ran, token) where - srcSpanMaybePositionLength :: (Integral l) => RealSrcSpan -> (Char.Position, l) - srcSpanMaybePositionLength real = + srcSpanCharPositions :: RealSrcSpan -> (Char.Position, Char.Position) + srcSpanCharPositions real = ( realSrcLocRopePosition $ realSrcSpanStart real, - fromIntegral $ srcSpanEndCol real - srcSpanStartCol real + realSrcLocRopePosition $ realSrcSpanEnd real ) - charSplitAtMaybe :: Word -> Rope -> Maybe (Text, Rope) - charSplitAtMaybe len rpe = do - let (prefix, suffix) = Rope.charSplitAt len rpe - guard $ Rope.charLength prefix == len + charSplitAtPositionMaybe :: Char.Position -> Rope -> Maybe (Text, Rope) + charSplitAtPositionMaybe tokenOff rpe = do + let (prefix, suffix) = Rope.charSplitAtPosition tokenOff rpe + guard $ Rope.charLengthAsPosition 
prefix == tokenOff return (Rope.toText prefix, suffix) - sub :: Char.Position -> Char.Position -> Char.Position - sub (Char.Position l1 c1) (Char.Position l2 c2) = - if l1 == l2 then Char.Position 0 (c1 - c2) else Char.Position (l1 - l2) c1 + sub :: Char.Position -> Char.Position -> Maybe Char.Position + sub (Char.Position l1 c1) (Char.Position l2 c2) + | l1 == l2 || c1 > c2 = Just $ Char.Position 0 (c1 - c2) + | l1 > l2 = Just $ Char.Position (l1 - l2) c1 + | otherwise = Nothing realSrcLocRopePosition :: RealSrcLoc -> Char.Position realSrcLocRopePosition real = Char.Position (fromIntegral $ srcLocLine real - 1) (fromIntegral $ srcLocCol real - 1) + -- | newColumn + -- rope do not treat single \n in our favor + -- for example, the row length of "123\n" and "123" are both 1 + -- we are forced to use text to compute new column newColumn :: UInt -> Text -> UInt newColumn n rp = case T.breakOnEnd "\n" rp of ("", nEnd) -> n + utf16Length nEnd @@ -174,15 +185,19 @@ splitRangeByText tk ran = do Just ('`', xs) -> (subOneRange ran, T.takeWhile (/= '`') xs) _ -> (ran, tk) let (prefix, tk'') = T.breakOnEnd "." tk' - splitRange tk'' (utf16Length prefix) ran' + splitRange tk'' (utf16PositionPosition $ Rope.utf16LengthAsPosition $ Rope.fromText prefix) ran' where - splitRange :: Text -> UInt -> Range -> Maybe SplitResult - splitRange tx n r@(Range (Position l1 c1) (Position l2 c2)) - | l1 == l2, n <= 0 = Just $ NoSplit (tx, r) - | l1 == l2, n < fromIntegral (c2 - c1) = Just $ Split (tx, Range (Position l1 c1) (Position l1 (c1 + n)), Range (Position l1 (c1 + n)) (Position l1 c2)) - | otherwise = Nothing + splitRange :: Text -> Position -> Range -> Maybe SplitResult + splitRange tx (Position l c) r@(Range (Position l1 c1) (Position l2 c2)) + | l1 + l > l2 || (l1 + l == l2 && c > c2) = Nothing -- out of range + | l==0 && c==0 = Just $ NoSplit (tx, r) + | l==0 = Just $ Split (tx, Range (Position l1 c1) (Position l1 (c1+c)), Range (Position l1 (c1+c)) (Position l2 c2)) + | otherwise = Just $ Split (tx, Range (Position l1 c1) (Position (l1+l) c), Range (Position (l1+l) c) (Position l2 c2)) subOneRange :: Range -> Range subOneRange (Range (Position l1 c1) (Position l2 c2)) = Range (Position l1 (c1 + 1)) (Position l2 (c2 - 1)) + utf16PositionPosition :: Utf16.Position -> Position + utf16PositionPosition (Utf16.Position l c) = Position (fromIntegral l) (fromIntegral c) + utf16Length :: Integral i => Text -> i utf16Length = fromIntegral . Utf16.length . 
Utf16.fromText diff --git a/plugins/hls-semantic-tokens-plugin/test/Main.hs b/plugins/hls-semantic-tokens-plugin/test/Main.hs index 5692017692..dc3efb2b72 100644 --- a/plugins/hls-semantic-tokens-plugin/test/Main.hs +++ b/plugins/hls-semantic-tokens-plugin/test/Main.hs @@ -205,7 +205,8 @@ semanticTokensTests = goldenWithSemanticTokensWithDefaultConfig "pattern bind" "TPatternSynonym", goldenWithSemanticTokensWithDefaultConfig "type family" "TTypefamily", goldenWithSemanticTokensWithDefaultConfig "TUnicodeSyntax" "TUnicodeSyntax", - goldenWithSemanticTokensWithDefaultConfig "TQualifiedName" "TQualifiedName" + goldenWithSemanticTokensWithDefaultConfig "TQualifiedName" "TQualifiedName", + goldenWithSemanticTokensWithDefaultConfig "TDoc" "TDoc" ] semanticTokensDataTypeTests :: TestTree diff --git a/plugins/hls-semantic-tokens-plugin/test/testdata/TDoc.expected b/plugins/hls-semantic-tokens-plugin/test/testdata/TDoc.expected new file mode 100644 index 0000000000..405308c3c8 --- /dev/null +++ b/plugins/hls-semantic-tokens-plugin/test/testdata/TDoc.expected @@ -0,0 +1,5 @@ +4:5-10 TVariable "hello" +5:1-6 TVariable "hello" +5:10-13 TTypeConstructor "Int" +6:1-6 TVariable "hello" +6:9-15 TClassMethod "length" diff --git a/plugins/hls-semantic-tokens-plugin/test/testdata/TDoc.hs b/plugins/hls-semantic-tokens-plugin/test/testdata/TDoc.hs new file mode 100644 index 0000000000..dc5801b0e6 --- /dev/null +++ b/plugins/hls-semantic-tokens-plugin/test/testdata/TDoc.hs @@ -0,0 +1,9 @@ +module TDoc where + +-- | +-- `hello` +hello :: Int +hello = length "Hello, Haskell!" + + + From e87a9ba4d59f30ee7a6e90323fd5eaf307a30a10 Mon Sep 17 00:00:00 2001 From: Patrick Date: Fri, 26 Jan 2024 17:23:19 +0800 Subject: [PATCH 55/74] fix sub --- .../src/Ide/Plugin/SemanticTokens/Tokenize.hs | 9 ++++----- 1 file changed, 4 insertions(+), 5 deletions(-) diff --git a/plugins/hls-semantic-tokens-plugin/src/Ide/Plugin/SemanticTokens/Tokenize.hs b/plugins/hls-semantic-tokens-plugin/src/Ide/Plugin/SemanticTokens/Tokenize.hs index cf3105a8d1..a7d0548829 100644 --- a/plugins/hls-semantic-tokens-plugin/src/Ide/Plugin/SemanticTokens/Tokenize.hs +++ b/plugins/hls-semantic-tokens-plugin/src/Ide/Plugin/SemanticTokens/Tokenize.hs @@ -26,7 +26,7 @@ import Development.IDE.GHC.Compat import Development.IDE.GHC.Error (realSrcSpanToCodePointRange) import Ide.Plugin.SemanticTokens.Types (RangeIdSetMap) import Language.LSP.Protocol.Types (Position (Position), - Range (Range), UInt) + Range (Range), UInt, mkRange) import Language.LSP.VFS hiding (line) import Prelude hiding (length, span) @@ -131,7 +131,6 @@ focusTokenAt :: focusTokenAt leaf = do PTokenState{cursor, rope, columnsInUtf16} <- get let span = nodeSpan leaf - -- traceShowM ("focusTokenAt", span) let (startPos, endPos) = srcSpanCharPositions span startOff <- lift $ startPos `sub` cursor tokenOff <- lift $ endPos `sub` startPos @@ -156,7 +155,7 @@ focusTokenAt leaf = do return (Rope.toText prefix, suffix) sub :: Char.Position -> Char.Position -> Maybe Char.Position sub (Char.Position l1 c1) (Char.Position l2 c2) - | l1 == l2 || c1 > c2 = Just $ Char.Position 0 (c1 - c2) + | l1 == l2 && c1 > c2 = Just $ Char.Position 0 (c1 - c2) | l1 > l2 = Just $ Char.Position (l1 - l2) c1 | otherwise = Nothing realSrcLocRopePosition :: RealSrcLoc -> Char.Position @@ -191,8 +190,8 @@ splitRangeByText tk ran = do splitRange tx (Position l c) r@(Range (Position l1 c1) (Position l2 c2)) | l1 + l > l2 || (l1 + l == l2 && c > c2) = Nothing -- out of range | l==0 && c==0 = Just $ NoSplit (tx, r) - | l==0 = 
Just $ Split (tx, Range (Position l1 c1) (Position l1 (c1+c)), Range (Position l1 (c1+c)) (Position l2 c2)) - | otherwise = Just $ Split (tx, Range (Position l1 c1) (Position (l1+l) c), Range (Position (l1+l) c) (Position l2 c2)) + | otherwise = let c' = if l <= 0 then c1+c else c + in Just $ Split (tx, mkRange l1 c1 (l1 + l) c', mkRange (l1 + l) c' l2 c2) subOneRange :: Range -> Range subOneRange (Range (Position l1 c1) (Position l2 c2)) = Range (Position l1 (c1 + 1)) (Position l2 (c2 - 1)) utf16PositionPosition :: Utf16.Position -> Position From ade5574eefb0de2a207332a3278a443676d8ea15 Mon Sep 17 00:00:00 2001 From: Patrick Date: Fri, 26 Jan 2024 18:33:48 +0800 Subject: [PATCH 56/74] fix test for ghc92 --- plugins/hls-semantic-tokens-plugin/test/Main.hs | 17 ++++++----------- 1 file changed, 6 insertions(+), 11 deletions(-) diff --git a/plugins/hls-semantic-tokens-plugin/test/Main.hs b/plugins/hls-semantic-tokens-plugin/test/Main.hs index dc3efb2b72..adecdadf04 100644 --- a/plugins/hls-semantic-tokens-plugin/test/Main.hs +++ b/plugins/hls-semantic-tokens-plugin/test/Main.hs @@ -1,3 +1,4 @@ +{-# LANGUAGE CPP #-} {-# LANGUAGE DataKinds #-} {-# LANGUAGE ExplicitNamespaces #-} {-# LANGUAGE FlexibleInstances #-} @@ -166,12 +167,6 @@ semanticTokensTests = [ testCase "module import test" $ do let file1 = "TModula𐐀bA.hs" let file2 = "TModuleB.hs" - let expect = - [ - SemanticTokenOriginal TModule (Loc 3 8 8) "TModuleA", - SemanticTokenOriginal TVariable (Loc 5 1 2) "go", - SemanticTokenOriginal TDataConstructor (Loc 5 6 4) "Game" - ] Test.Hls.runSessionWithServerInTmpDir def semanticTokensPlugin (mkFs $ FS.directProjectMulti [file1, file2]) $ do doc1 <- openDoc file1 "haskell" doc2 <- openDoc file2 "haskell" @@ -186,9 +181,6 @@ semanticTokensTests = - textContent2 <- documentContents doc2 - let vfs = VirtualFile 0 0 (Rope.fromText textContent2) - res2 <- Test.getSemanticTokens doc2 result <- docSemanticTokensString def doc2 let expect = unlines [ "3:8-18 TModule \"TModula\\66560bA\"" @@ -205,8 +197,11 @@ semanticTokensTests = goldenWithSemanticTokensWithDefaultConfig "pattern bind" "TPatternSynonym", goldenWithSemanticTokensWithDefaultConfig "type family" "TTypefamily", goldenWithSemanticTokensWithDefaultConfig "TUnicodeSyntax" "TUnicodeSyntax", - goldenWithSemanticTokensWithDefaultConfig "TQualifiedName" "TQualifiedName", - goldenWithSemanticTokensWithDefaultConfig "TDoc" "TDoc" + goldenWithSemanticTokensWithDefaultConfig "TQualifiedName" "TQualifiedName" + -- it is not supported in ghc92 +#if MIN_VERSION_ghc(9,4,0) + , goldenWithSemanticTokensWithDefaultConfig "TDoc" "TDoc" +#endif ] semanticTokensDataTypeTests :: TestTree From d9960e17a6bff3233b14f42cd7871a10413c9b24 Mon Sep 17 00:00:00 2001 From: Patrick Date: Mon, 29 Jan 2024 02:37:03 +0800 Subject: [PATCH 57/74] use strict map and make range and split explicit --- .../src/Ide/Plugin/SemanticTokens/Tokenize.hs | 87 +++++++++---------- 1 file changed, 39 insertions(+), 48 deletions(-) diff --git a/plugins/hls-semantic-tokens-plugin/src/Ide/Plugin/SemanticTokens/Tokenize.hs b/plugins/hls-semantic-tokens-plugin/src/Ide/Plugin/SemanticTokens/Tokenize.hs index a7d0548829..909deff56f 100644 --- a/plugins/hls-semantic-tokens-plugin/src/Ide/Plugin/SemanticTokens/Tokenize.hs +++ b/plugins/hls-semantic-tokens-plugin/src/Ide/Plugin/SemanticTokens/Tokenize.hs @@ -6,43 +6,41 @@ module Ide.Plugin.SemanticTokens.Tokenize (hieAstSpanIdentifiers) where -import Control.Lens (Identity (runIdentity)) -import Control.Monad (forM_, guard) -import 
Control.Monad.State (MonadState (get), - MonadTrans (lift), execStateT, - gets, modify, put) -import Control.Monad.Trans.State (StateT) -import qualified Data.Map as M -import qualified Data.Map as Map -import qualified Data.Set as S -import Data.Text (Text) -import qualified Data.Text as T -import qualified Data.Text.Rope as Char -import Data.Text.Utf16.Rope (toText) -import qualified Data.Text.Utf16.Rope as Utf16 -import Data.Text.Utf16.Rope.Mixed (Rope) -import qualified Data.Text.Utf16.Rope.Mixed as Rope +import Control.Lens (Identity (runIdentity)) +import Control.Monad (forM_, guard) +import Control.Monad.State.Strict (MonadState (get), + MonadTrans (lift), + execStateT, modify, put) +import Control.Monad.Trans.State.Strict (StateT) +import qualified Data.Map as M +import qualified Data.Map as Map +import qualified Data.Set as S +import Data.Text (Text) +import qualified Data.Text as T +import qualified Data.Text.Rope as Char +import Data.Text.Utf16.Rope (toText) +import qualified Data.Text.Utf16.Rope as Utf16 +import Data.Text.Utf16.Rope.Mixed (Rope) +import qualified Data.Text.Utf16.Rope.Mixed as Rope import Development.IDE.GHC.Compat -import Development.IDE.GHC.Error (realSrcSpanToCodePointRange) -import Ide.Plugin.SemanticTokens.Types (RangeIdSetMap) -import Language.LSP.Protocol.Types (Position (Position), - Range (Range), UInt, mkRange) -import Language.LSP.VFS hiding (line) -import Prelude hiding (length, span) - -type Tokenizer m a = forall t. StateT (PTokenState t) m a - - -data PTokenState t = PTokenState - { rangeIdSetMap :: RangeIdSetMap, - rope :: Rope, - cursor :: Char.Position, - columnsInUtf16 :: UInt, - currentRange :: Range, - currentRangeContext :: SplitResult +import Development.IDE.GHC.Error (realSrcSpanToCodePointRange) +import Ide.Plugin.SemanticTokens.Types (RangeIdSetMap) +import Language.LSP.Protocol.Types (Position (Position), + Range (Range), UInt, mkRange) +import Language.LSP.VFS hiding (line) +import Prelude hiding (length, span) + +type Tokenizer m a = StateT PTokenState m a + + +data PTokenState = PTokenState + { rangeIdSetMap :: RangeIdSetMap, + rope :: Rope, + cursor :: !Char.Position, + columnsInUtf16 :: !UInt } -runTokenizer :: (Monad m) => Tokenizer m a -> PTokenState t -> m RangeIdSetMap +runTokenizer :: (Monad m) => Tokenizer m a -> PTokenState -> m RangeIdSetMap runTokenizer p st = rangeIdSetMap <$> execStateT p st data SplitResult @@ -50,18 +48,14 @@ data SplitResult | Split (Text, Range, Range) -- token text, prefix range(module range), token range deriving (Show) -startRange :: Range -startRange = Range (Position 0 0) (Position 0 0) -mkPTokenState :: VirtualFile -> PTokenState a +mkPTokenState :: VirtualFile -> PTokenState mkPTokenState vf = PTokenState { rangeIdSetMap = mempty, rope = Rope.fromText $ toText vf._file_text, cursor = Char.Position 0 0, - columnsInUtf16 = 0, - currentRange = startRange, - currentRangeContext = NoSplit ("", startRange) + columnsInUtf16 = 0 } addRangeIdSetMap :: (Monad m) => Range -> Identifier -> Tokenizer m () @@ -95,18 +89,15 @@ visitLeafIds leaf = liftMaybeM $ do -- only handle the leaf node with single column token guard $ srcSpanStartLine span == srcSpanEndLine span splitResult <- lift $ splitRangeByText token ran - modify $ \s -> s {currentRange = ran, currentRangeContext = splitResult} - mapM_ combineNodeIds $ Map.filterWithKey (\k _ -> k == SourceInfo) $ getSourcedNodeInfo $ sourcedNodeInfo leaf + mapM_ (combineNodeIds ran splitResult) $ Map.filterWithKey (\k _ -> k == SourceInfo) $ getSourcedNodeInfo $ 
sourcedNodeInfo leaf where - combineNodeIds :: (Monad m) => NodeInfo a -> Tokenizer m () - combineNodeIds (NodeInfo _ _ bd) = mapM_ getIdentifier (M.keys bd) - getIdentifier :: (Monad m) => Identifier -> Tokenizer m () - getIdentifier idt = liftMaybeM $ do - ran <- gets currentRange + combineNodeIds :: (Monad m) => Range -> SplitResult -> NodeInfo a -> Tokenizer m () + combineNodeIds ran ranSplit (NodeInfo _ _ bd) = mapM_ (getIdentifier ran ranSplit) (M.keys bd) + getIdentifier :: (Monad m) => Range -> SplitResult -> Identifier -> Tokenizer m () + getIdentifier ran ranSplit idt = liftMaybeM $ do case idt of Left _moduleName -> addRangeIdSetMap ran idt Right name -> do - ranSplit <- gets currentRangeContext occStr <- lift $ case (occNameString . nameOccName) name of -- the generated selector name with {-# LANGUAGE DuplicateRecordFields #-} '$' : 's' : 'e' : 'l' : ':' : xs -> Just $ takeWhile (/= ':') xs From 63fffa9843e399d7c498b246d3909a06d087c784 Mon Sep 17 00:00:00 2001 From: Patrick Date: Mon, 29 Jan 2024 02:39:09 +0800 Subject: [PATCH 58/74] use strict map --- .../src/Ide/Plugin/SemanticTokens/Types.hs | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/plugins/hls-semantic-tokens-plugin/src/Ide/Plugin/SemanticTokens/Types.hs b/plugins/hls-semantic-tokens-plugin/src/Ide/Plugin/SemanticTokens/Types.hs index 8871c0dc5d..214069b1ed 100644 --- a/plugins/hls-semantic-tokens-plugin/src/Ide/Plugin/SemanticTokens/Types.hs +++ b/plugins/hls-semantic-tokens-plugin/src/Ide/Plugin/SemanticTokens/Types.hs @@ -17,7 +17,7 @@ import Control.DeepSeq (NFData (rnf), rwhnf) import qualified Data.Array as A import Data.Default (Default (def)) import Data.Generics (Typeable) -import qualified Data.Map as M +import qualified Data.Map.Strict as M import Development.IDE (Pretty (pretty), RuleResult) import qualified Development.IDE.Core.Shake as Shake import Development.IDE.GHC.Compat hiding (loc) @@ -25,7 +25,7 @@ import Development.IDE.Graph.Classes (Hashable) import GHC.Generics (Generic) import Language.LSP.Protocol.Types -- import template haskell -import Data.Map (Map) +import Data.Map.Strict (Map) import Data.Set (Set) import Language.Haskell.TH.Syntax (Lift) From 11e36ea58a90157de9041624fda86cea57e7960c Mon Sep 17 00:00:00 2001 From: Patrick Date: Mon, 29 Jan 2024 02:41:31 +0800 Subject: [PATCH 59/74] use strict map --- .../src/Ide/Plugin/SemanticTokens/Query.hs | 5 ++--- .../src/Ide/Plugin/SemanticTokens/Tokenize.hs | 4 ++-- 2 files changed, 4 insertions(+), 5 deletions(-) diff --git a/plugins/hls-semantic-tokens-plugin/src/Ide/Plugin/SemanticTokens/Query.hs b/plugins/hls-semantic-tokens-plugin/src/Ide/Plugin/SemanticTokens/Query.hs index b97c797457..d7949dda3b 100644 --- a/plugins/hls-semantic-tokens-plugin/src/Ide/Plugin/SemanticTokens/Query.hs +++ b/plugins/hls-semantic-tokens-plugin/src/Ide/Plugin/SemanticTokens/Query.hs @@ -9,10 +9,9 @@ module Ide.Plugin.SemanticTokens.Query where import Data.Foldable (fold) -import qualified Data.Map as M -import qualified Data.Map as Map +import qualified Data.Map.Strict as M +import qualified Data.Map.Strict as Map import Data.Maybe (listToMaybe, mapMaybe) -import Data.Set (Set) import qualified Data.Set as Set import Data.Text (Text) import Development.IDE.Core.PositionMapping (PositionMapping, diff --git a/plugins/hls-semantic-tokens-plugin/src/Ide/Plugin/SemanticTokens/Tokenize.hs b/plugins/hls-semantic-tokens-plugin/src/Ide/Plugin/SemanticTokens/Tokenize.hs index 909deff56f..5edf2fac48 100644 --- 
a/plugins/hls-semantic-tokens-plugin/src/Ide/Plugin/SemanticTokens/Tokenize.hs +++ b/plugins/hls-semantic-tokens-plugin/src/Ide/Plugin/SemanticTokens/Tokenize.hs @@ -12,8 +12,8 @@ import Control.Monad.State.Strict (MonadState (get), MonadTrans (lift), execStateT, modify, put) import Control.Monad.Trans.State.Strict (StateT) -import qualified Data.Map as M -import qualified Data.Map as Map +import qualified Data.Map.Strict as M +import qualified Data.Map.Strict as Map import qualified Data.Set as S import Data.Text (Text) import qualified Data.Text as T From b473abcb19aa08cf56c94bed4d2d255a5e068326 Mon Sep 17 00:00:00 2001 From: Patrick Date: Mon, 29 Jan 2024 02:54:28 +0800 Subject: [PATCH 60/74] cleanup --- .../src/Ide/Plugin/SemanticTokens/Internal.hs | 2 +- .../src/Ide/Plugin/SemanticTokens/Query.hs | 21 +++++++++---------- 2 files changed, 11 insertions(+), 12 deletions(-) diff --git a/plugins/hls-semantic-tokens-plugin/src/Ide/Plugin/SemanticTokens/Internal.hs b/plugins/hls-semantic-tokens-plugin/src/Ide/Plugin/SemanticTokens/Internal.hs index 518b37e90d..5123e76c06 100644 --- a/plugins/hls-semantic-tokens-plugin/src/Ide/Plugin/SemanticTokens/Internal.hs +++ b/plugins/hls-semantic-tokens-plugin/src/Ide/Plugin/SemanticTokens/Internal.hs @@ -107,7 +107,7 @@ getSemanticTokensRule recorder = virtualFile <- handleMaybeM LogNoVF $ getVirtualFile nfp -- get current location from the old ones let spanIdMap = M.filter (not . null) $ hieAstSpanIdentifiers virtualFile ast - let names = S.toList $ S.unions $ Map.elems spanIdMap + let names = S.unions $ Map.elems spanIdMap let localSemanticMap = mkLocalIdSemanticFromAst names (hieKindFunMasksKind hieKind) refMap -- get imported name semantic map let importedIdSemanticMap = foldr (getTypeExclude localSemanticMap getTyThingMap) mempty names diff --git a/plugins/hls-semantic-tokens-plugin/src/Ide/Plugin/SemanticTokens/Query.hs b/plugins/hls-semantic-tokens-plugin/src/Ide/Plugin/SemanticTokens/Query.hs index d7949dda3b..847da4e61f 100644 --- a/plugins/hls-semantic-tokens-plugin/src/Ide/Plugin/SemanticTokens/Query.hs +++ b/plugins/hls-semantic-tokens-plugin/src/Ide/Plugin/SemanticTokens/Query.hs @@ -10,8 +10,8 @@ module Ide.Plugin.SemanticTokens.Query where import Data.Foldable (fold) import qualified Data.Map.Strict as M -import qualified Data.Map.Strict as Map import Data.Maybe (listToMaybe, mapMaybe) +import Data.Set (Set) import qualified Data.Set as Set import Data.Text (Text) import Development.IDE.Core.PositionMapping (PositionMapping, @@ -37,18 +37,17 @@ import Prelude hiding (length, span) --------------------------------------------------------- -mkLocalIdSemanticFromAst :: [Identifier] -> HieFunMaskKind a -> RefMap a -> IdSemanticMap -mkLocalIdSemanticFromAst names hieKind rm = M.fromList (mapMaybe (idIdSemanticFromHie hieKind rm) names) +mkLocalIdSemanticFromAst :: Set Identifier -> HieFunMaskKind a -> RefMap a -> IdSemanticMap +mkLocalIdSemanticFromAst names hieKind rm = M.mapMaybe (idIdSemanticFromHie hieKind rm) $ M.fromSet id names -idIdSemanticFromHie :: forall a. HieFunMaskKind a -> RefMap a -> Identifier -> Maybe (Identifier, HsSemanticTokenType) -idIdSemanticFromHie _ _ ns@(Left _) = Just (ns, TModule) -idIdSemanticFromHie hieKind rm ns@(Right _) = do - st <- idSemanticFromRefMap rm ns - return (ns, st) +idIdSemanticFromHie :: forall a. 
HieFunMaskKind a -> RefMap a -> Identifier -> Maybe HsSemanticTokenType +idIdSemanticFromHie _ _ (Left _) = Just TModule +idIdSemanticFromHie hieKind rm ns = do + idSemanticFromRefMap rm ns where idSemanticFromRefMap :: RefMap a -> Identifier -> Maybe HsSemanticTokenType idSemanticFromRefMap rm' name' = do - spanInfos <- Map.lookup name' rm' + spanInfos <- M.lookup name' rm' let typeTokenType = foldMap (typeSemantic hieKind) $ listToMaybe $ mapMaybe (identType . snd) spanInfos contextInfoTokenType <- foldMap (contextInfosMaybeTokenType . identInfo . snd) spanInfos fold [typeTokenType, Just contextInfoTokenType] @@ -64,13 +63,13 @@ idIdSemanticFromHie hieKind rm ns@(Right _) = do ------------------------------------------------- extractSemanticTokensFromNames :: IdSemanticMap -> RangeIdSetMap -> M.Map Range HsSemanticTokenType -extractSemanticTokensFromNames nsm = Map.mapMaybe (foldMap (`M.lookup` nsm)) +extractSemanticTokensFromNames nsm = M.mapMaybe (foldMap (`M.lookup` nsm)) rangeSemanticMapSemanticTokens :: SemanticTokensConfig -> PositionMapping -> M.Map Range HsSemanticTokenType -> Either Text SemanticTokens rangeSemanticMapSemanticTokens stc mapping = makeSemanticTokens defaultSemanticTokensLegend . mapMaybe (\(range, ty) -> flip toAbsSemanticToken ty <$> range) - . Map.toAscList + . M.toAscList . M.mapKeys (toCurrentRange mapping) where toAbsSemanticToken :: Range -> HsSemanticTokenType -> SemanticTokenAbsolute From c6d00238f96c59421e465020ff6af882657e2daf Mon Sep 17 00:00:00 2001 From: Patrick Date: Mon, 29 Jan 2024 03:10:39 +0800 Subject: [PATCH 61/74] add doc --- .../src/Ide/Plugin/SemanticTokens/Tokenize.hs | 22 +++++++++++-------- 1 file changed, 13 insertions(+), 9 deletions(-) diff --git a/plugins/hls-semantic-tokens-plugin/src/Ide/Plugin/SemanticTokens/Tokenize.hs b/plugins/hls-semantic-tokens-plugin/src/Ide/Plugin/SemanticTokens/Tokenize.hs index 5edf2fac48..305c4eb5ca 100644 --- a/plugins/hls-semantic-tokens-plugin/src/Ide/Plugin/SemanticTokens/Tokenize.hs +++ b/plugins/hls-semantic-tokens-plugin/src/Ide/Plugin/SemanticTokens/Tokenize.hs @@ -35,9 +35,9 @@ type Tokenizer m a = StateT PTokenState m a data PTokenState = PTokenState { rangeIdSetMap :: RangeIdSetMap, - rope :: Rope, - cursor :: !Char.Position, - columnsInUtf16 :: !UInt + rope :: Rope, -- the remains of rope we are working on + cursor :: !Char.Position, -- the cursor position of the current rope to the start of the original file in code point position + columnsInUtf16 :: !UInt -- the column of the start of the current rope in utf16 } runTokenizer :: (Monad m) => Tokenizer m a -> PTokenState -> m RangeIdSetMap @@ -122,16 +122,20 @@ focusTokenAt :: focusTokenAt leaf = do PTokenState{cursor, rope, columnsInUtf16} <- get let span = nodeSpan leaf - let (startPos, endPos) = srcSpanCharPositions span - startOff <- lift $ startPos `sub` cursor - tokenOff <- lift $ endPos `sub` startPos - (gap, startRope) <- lift $ charSplitAtPositionMaybe startOff rope - (token, remains) <- lift $ charSplitAtPositionMaybe tokenOff startRope + let (tokenStartPos, tokenEndPos) = srcSpanCharPositions span + -- tokenStartOff: the offset position of the token start position to the cursor position + tokenStartOff <- lift $ tokenStartPos `sub` cursor + -- tokenOff: the offset position of the token end position to the token start position + tokenOff <- lift $ tokenEndPos `sub` tokenStartPos + (gap, tokenStartRope) <- lift $ charSplitAtPositionMaybe tokenStartOff rope + (token, remains) <- lift $ charSplitAtPositionMaybe tokenOff tokenStartRope 
+ -- ncs: token start column in utf16 let ncs = newColumn columnsInUtf16 gap + -- nce: token end column in utf16 let nce = newColumn ncs token -- compute the new range for utf16, tuning the columns is enough let ran = codePointRangeToRangeWith ncs nce $ realSrcSpanToCodePointRange span - modify $ \s -> s {columnsInUtf16 = nce, rope = remains, cursor = endPos} + modify $ \s -> s {columnsInUtf16 = nce, rope = remains, cursor = tokenEndPos} return (ran, token) where srcSpanCharPositions :: RealSrcSpan -> (Char.Position, Char.Position) From 01c80999bfd331390f85b6390af506bddbcb7447 Mon Sep 17 00:00:00 2001 From: Patrick Date: Mon, 29 Jan 2024 03:30:16 +0800 Subject: [PATCH 62/74] handle more DerivedOccName --- .../src/Ide/Plugin/SemanticTokens/Tokenize.hs | 7 ++++--- 1 file changed, 4 insertions(+), 3 deletions(-) diff --git a/plugins/hls-semantic-tokens-plugin/src/Ide/Plugin/SemanticTokens/Tokenize.hs b/plugins/hls-semantic-tokens-plugin/src/Ide/Plugin/SemanticTokens/Tokenize.hs index 305c4eb5ca..eabead5d3b 100644 --- a/plugins/hls-semantic-tokens-plugin/src/Ide/Plugin/SemanticTokens/Tokenize.hs +++ b/plugins/hls-semantic-tokens-plugin/src/Ide/Plugin/SemanticTokens/Tokenize.hs @@ -98,19 +98,20 @@ visitLeafIds leaf = liftMaybeM $ do case idt of Left _moduleName -> addRangeIdSetMap ran idt Right name -> do - occStr <- lift $ case (occNameString . nameOccName) name of + occStr <- lift $ T.pack <$> case (occNameString . nameOccName) name of -- the generated selector name with {-# LANGUAGE DuplicateRecordFields #-} '$' : 's' : 'e' : 'l' : ':' : xs -> Just $ takeWhile (/= ':') xs ['$'] -> Just "$" -- other generated names that should not be visible '$' : _ -> Nothing + _c : ':' : _ -> Nothing ns -> Just ns case ranSplit of (NoSplit (tk, r)) -> do - guard $ T.unpack tk == occStr + guard $ tk == occStr addRangeIdSetMap r idt (Split (tk, r1, r2)) -> do - guard $ T.unpack tk == occStr + guard $ tk == occStr addRangeIdSetMap r1 (Left $ mkModuleName "") addRangeIdSetMap r2 idt From 14d6be89236e5af349335f1a63fd3a09aac1ae1b Mon Sep 17 00:00:00 2001 From: Patrick Date: Mon, 29 Jan 2024 03:41:35 +0800 Subject: [PATCH 63/74] use strict map --- .../src/Ide/Plugin/SemanticTokens/Internal.hs | 9 ++++----- 1 file changed, 4 insertions(+), 5 deletions(-) diff --git a/plugins/hls-semantic-tokens-plugin/src/Ide/Plugin/SemanticTokens/Internal.hs b/plugins/hls-semantic-tokens-plugin/src/Ide/Plugin/SemanticTokens/Internal.hs index 5123e76c06..2023231ff8 100644 --- a/plugins/hls-semantic-tokens-plugin/src/Ide/Plugin/SemanticTokens/Internal.hs +++ b/plugins/hls-semantic-tokens-plugin/src/Ide/Plugin/SemanticTokens/Internal.hs @@ -23,9 +23,8 @@ import Control.Monad.Except (ExceptT, liftEither, withExceptT) import Control.Monad.Trans (lift) import Control.Monad.Trans.Except (runExceptT) -import Data.Map (Map) -import qualified Data.Map as M -import qualified Data.Map as Map +import Data.Map.Strict (Map) +import qualified Data.Map.Strict as M import qualified Data.Set as S import Development.IDE (Action, GetDocMap (GetDocMap), @@ -103,11 +102,11 @@ getSemanticTokensRule recorder = define (cmapWithPrio LogShake recorder) $ \GetSemanticTokens nfp -> handleError recorder $ do (HAR {..}) <- lift $ use_ GetHieAst nfp (DKMap {getTyThingMap}, _) <- lift $ useWithStale_ GetDocMap nfp - ast <- handleMaybe (LogNoAST $ show nfp) $ getAsts hieAst Map.!? (HiePath . mkFastString . fromNormalizedFilePath) nfp + ast <- handleMaybe (LogNoAST $ show nfp) $ getAsts hieAst M.!? (HiePath . mkFastString . 
fromNormalizedFilePath) nfp virtualFile <- handleMaybeM LogNoVF $ getVirtualFile nfp -- get current location from the old ones let spanIdMap = M.filter (not . null) $ hieAstSpanIdentifiers virtualFile ast - let names = S.unions $ Map.elems spanIdMap + let names = S.unions $ M.elems spanIdMap let localSemanticMap = mkLocalIdSemanticFromAst names (hieKindFunMasksKind hieKind) refMap -- get imported name semantic map let importedIdSemanticMap = foldr (getTypeExclude localSemanticMap getTyThingMap) mempty names From dce593adf41db0f56e433f7a7ce3b5daea170cf5 Mon Sep 17 00:00:00 2001 From: Patrick Date: Mon, 29 Jan 2024 03:43:36 +0800 Subject: [PATCH 64/74] use strict map --- .../src/Ide/Plugin/SemanticTokens/Mappings.hs | 2 +- .../src/Ide/Plugin/SemanticTokens/Utils.hs | 2 +- plugins/hls-semantic-tokens-plugin/test/Main.hs | 2 +- 3 files changed, 3 insertions(+), 3 deletions(-) diff --git a/plugins/hls-semantic-tokens-plugin/src/Ide/Plugin/SemanticTokens/Mappings.hs b/plugins/hls-semantic-tokens-plugin/src/Ide/Plugin/SemanticTokens/Mappings.hs index 27db5a0894..1003708b41 100644 --- a/plugins/hls-semantic-tokens-plugin/src/Ide/Plugin/SemanticTokens/Mappings.hs +++ b/plugins/hls-semantic-tokens-plugin/src/Ide/Plugin/SemanticTokens/Mappings.hs @@ -13,7 +13,7 @@ module Ide.Plugin.SemanticTokens.Mappings where import qualified Data.Array as A import Data.List.Extra (chunksOf, (!?)) -import qualified Data.Map as Map +import qualified Data.Map.Strict as Map import Data.Maybe (mapMaybe) import qualified Data.Set as Set import Data.Text (Text, unpack) diff --git a/plugins/hls-semantic-tokens-plugin/src/Ide/Plugin/SemanticTokens/Utils.hs b/plugins/hls-semantic-tokens-plugin/src/Ide/Plugin/SemanticTokens/Utils.hs index f6b4657e30..7b22284850 100644 --- a/plugins/hls-semantic-tokens-plugin/src/Ide/Plugin/SemanticTokens/Utils.hs +++ b/plugins/hls-semantic-tokens-plugin/src/Ide/Plugin/SemanticTokens/Utils.hs @@ -10,7 +10,7 @@ module Ide.Plugin.SemanticTokens.Utils where import Data.ByteString (ByteString) import Data.ByteString.Char8 (unpack) -import qualified Data.Map as Map +import qualified Data.Map.Strict as Map import Development.IDE (Position (..), Range (..)) import Development.IDE.GHC.Compat import Prelude hiding (length, span) diff --git a/plugins/hls-semantic-tokens-plugin/test/Main.hs b/plugins/hls-semantic-tokens-plugin/test/Main.hs index adecdadf04..b5e3c8edd1 100644 --- a/plugins/hls-semantic-tokens-plugin/test/Main.hs +++ b/plugins/hls-semantic-tokens-plugin/test/Main.hs @@ -12,7 +12,7 @@ import Data.Aeson (KeyValue (..), Value (..), object) import Data.Default import Data.Functor (void) -import Data.Map as Map hiding (map) +import Data.Map.Strict as Map hiding (map) import Data.String (fromString) import Data.Text hiding (length, map, unlines) From 281850d484c593290b2dfec2e0e8bc7266dd1a85 Mon Sep 17 00:00:00 2001 From: Patrick Date: Mon, 29 Jan 2024 03:46:38 +0800 Subject: [PATCH 65/74] make field strict --- .../src/Ide/Plugin/SemanticTokens/Tokenize.hs | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/plugins/hls-semantic-tokens-plugin/src/Ide/Plugin/SemanticTokens/Tokenize.hs b/plugins/hls-semantic-tokens-plugin/src/Ide/Plugin/SemanticTokens/Tokenize.hs index eabead5d3b..1834db4186 100644 --- a/plugins/hls-semantic-tokens-plugin/src/Ide/Plugin/SemanticTokens/Tokenize.hs +++ b/plugins/hls-semantic-tokens-plugin/src/Ide/Plugin/SemanticTokens/Tokenize.hs @@ -34,8 +34,8 @@ type Tokenizer m a = StateT PTokenState m a data PTokenState = PTokenState - { rangeIdSetMap :: 
RangeIdSetMap, - rope :: Rope, -- the remains of rope we are working on + { rangeIdSetMap :: !RangeIdSetMap, + rope :: !Rope, -- the remains of rope we are working on cursor :: !Char.Position, -- the cursor position of the current rope to the start of the original file in code point position columnsInUtf16 :: !UInt -- the column of the start of the current rope in utf16 } From bef5a2a2dc9da30b690c4fe5c64bf5055adb4b44 Mon Sep 17 00:00:00 2001 From: Patrick Date: Mon, 29 Jan 2024 04:20:42 +0800 Subject: [PATCH 66/74] cleanup --- .../src/Ide/Plugin/SemanticTokens/Tokenize.hs | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/plugins/hls-semantic-tokens-plugin/src/Ide/Plugin/SemanticTokens/Tokenize.hs b/plugins/hls-semantic-tokens-plugin/src/Ide/Plugin/SemanticTokens/Tokenize.hs index 1834db4186..eabead5d3b 100644 --- a/plugins/hls-semantic-tokens-plugin/src/Ide/Plugin/SemanticTokens/Tokenize.hs +++ b/plugins/hls-semantic-tokens-plugin/src/Ide/Plugin/SemanticTokens/Tokenize.hs @@ -34,8 +34,8 @@ type Tokenizer m a = StateT PTokenState m a data PTokenState = PTokenState - { rangeIdSetMap :: !RangeIdSetMap, - rope :: !Rope, -- the remains of rope we are working on + { rangeIdSetMap :: RangeIdSetMap, + rope :: Rope, -- the remains of rope we are working on cursor :: !Char.Position, -- the cursor position of the current rope to the start of the original file in code point position columnsInUtf16 :: !UInt -- the column of the start of the current rope in utf16 } From 47338cca4a2fe6281e23a32549c6b4127d0e8e1f Mon Sep 17 00:00:00 2001 From: Patrick Date: Mon, 29 Jan 2024 04:29:07 +0800 Subject: [PATCH 67/74] make rope strict in PTokenState --- .../src/Ide/Plugin/SemanticTokens/Tokenize.hs | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/plugins/hls-semantic-tokens-plugin/src/Ide/Plugin/SemanticTokens/Tokenize.hs b/plugins/hls-semantic-tokens-plugin/src/Ide/Plugin/SemanticTokens/Tokenize.hs index eabead5d3b..5104fbfbde 100644 --- a/plugins/hls-semantic-tokens-plugin/src/Ide/Plugin/SemanticTokens/Tokenize.hs +++ b/plugins/hls-semantic-tokens-plugin/src/Ide/Plugin/SemanticTokens/Tokenize.hs @@ -35,7 +35,7 @@ type Tokenizer m a = StateT PTokenState m a data PTokenState = PTokenState { rangeIdSetMap :: RangeIdSetMap, - rope :: Rope, -- the remains of rope we are working on + rope :: !Rope, -- the remains of rope we are working on cursor :: !Char.Position, -- the cursor position of the current rope to the start of the original file in code point position columnsInUtf16 :: !UInt -- the column of the start of the current rope in utf16 } From 72467a8808bddd7330abf2523349356178b228ec Mon Sep 17 00:00:00 2001 From: Patrick Date: Mon, 29 Jan 2024 04:29:56 +0800 Subject: [PATCH 68/74] make map strict in PTokenState --- .../src/Ide/Plugin/SemanticTokens/Tokenize.hs | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/plugins/hls-semantic-tokens-plugin/src/Ide/Plugin/SemanticTokens/Tokenize.hs b/plugins/hls-semantic-tokens-plugin/src/Ide/Plugin/SemanticTokens/Tokenize.hs index 5104fbfbde..1834db4186 100644 --- a/plugins/hls-semantic-tokens-plugin/src/Ide/Plugin/SemanticTokens/Tokenize.hs +++ b/plugins/hls-semantic-tokens-plugin/src/Ide/Plugin/SemanticTokens/Tokenize.hs @@ -34,7 +34,7 @@ type Tokenizer m a = StateT PTokenState m a data PTokenState = PTokenState - { rangeIdSetMap :: RangeIdSetMap, + { rangeIdSetMap :: !RangeIdSetMap, rope :: !Rope, -- the remains of rope we are working on cursor :: !Char.Position, -- the cursor position of the current rope to the start 
of the original file in code point position columnsInUtf16 :: !UInt -- the column of the start of the current rope in utf16 From eea7f9fe10488e944c90df51567e11c461a352e8 Mon Sep 17 00:00:00 2001 From: Patrick Date: Mon, 29 Jan 2024 05:51:55 +0800 Subject: [PATCH 69/74] use fromSet for importedIdSemanticMap --- .../src/Ide/Plugin/SemanticTokens/Internal.hs | 14 ++++++-------- 1 file changed, 6 insertions(+), 8 deletions(-) diff --git a/plugins/hls-semantic-tokens-plugin/src/Ide/Plugin/SemanticTokens/Internal.hs b/plugins/hls-semantic-tokens-plugin/src/Ide/Plugin/SemanticTokens/Internal.hs index 2023231ff8..851549855b 100644 --- a/plugins/hls-semantic-tokens-plugin/src/Ide/Plugin/SemanticTokens/Internal.hs +++ b/plugins/hls-semantic-tokens-plugin/src/Ide/Plugin/SemanticTokens/Internal.hs @@ -109,24 +109,22 @@ getSemanticTokensRule recorder = let names = S.unions $ M.elems spanIdMap let localSemanticMap = mkLocalIdSemanticFromAst names (hieKindFunMasksKind hieKind) refMap -- get imported name semantic map - let importedIdSemanticMap = foldr (getTypeExclude localSemanticMap getTyThingMap) mempty names + let importedIdSemanticMap = M.mapMaybe id + $ M.fromSet (getTypeExclude getTyThingMap) (names `S.difference` M.keysSet localSemanticMap) let sMap = M.unionWith (<>) importedIdSemanticMap localSemanticMap let rangeTokenType = extractSemanticTokensFromNames sMap spanIdMap return $ RangeHsSemanticTokenTypes rangeTokenType where -- ignore one already in discovered in local getTypeExclude :: - Map Identifier a -> NameEnv TyThing -> Identifier -> - Map Identifier HsSemanticTokenType -> - Map Identifier HsSemanticTokenType - getTypeExclude localEnv tyThingMap n nameMap - | n `M.member` localEnv = nameMap + Maybe HsSemanticTokenType + getTypeExclude tyThingMap n | (Right name) <- n = let tyThing = lookupNameEnv tyThingMap name - in maybe nameMap (\k -> M.insert n k nameMap) (tyThing >>= tyThingSemantic) - | otherwise = nameMap + in (tyThing >>= tyThingSemantic) + | otherwise = Nothing -- | Persistent rule to ensure that semantic tokens doesn't block on startup persistentGetSemanticTokensRule :: Rules () From eb5f9f00410f50c0c3e08037ef19d4ff1a605684 Mon Sep 17 00:00:00 2001 From: Patrick Date: Mon, 29 Jan 2024 16:05:32 +0800 Subject: [PATCH 70/74] add guard --- .../src/Ide/Plugin/SemanticTokens/Internal.hs | 2 +- .../src/Ide/Plugin/SemanticTokens/Tokenize.hs | 6 ++++-- 2 files changed, 5 insertions(+), 3 deletions(-) diff --git a/plugins/hls-semantic-tokens-plugin/src/Ide/Plugin/SemanticTokens/Internal.hs b/plugins/hls-semantic-tokens-plugin/src/Ide/Plugin/SemanticTokens/Internal.hs index 851549855b..4c237a297c 100644 --- a/plugins/hls-semantic-tokens-plugin/src/Ide/Plugin/SemanticTokens/Internal.hs +++ b/plugins/hls-semantic-tokens-plugin/src/Ide/Plugin/SemanticTokens/Internal.hs @@ -122,7 +122,7 @@ getSemanticTokensRule recorder = Maybe HsSemanticTokenType getTypeExclude tyThingMap n | (Right name) <- n = - let tyThing = lookupNameEnv tyThingMap name + let tyThing = lookupNameEnv tyThingMap name in (tyThing >>= tyThingSemantic) | otherwise = Nothing diff --git a/plugins/hls-semantic-tokens-plugin/src/Ide/Plugin/SemanticTokens/Tokenize.hs b/plugins/hls-semantic-tokens-plugin/src/Ide/Plugin/SemanticTokens/Tokenize.hs index 1834db4186..5cceb33867 100644 --- a/plugins/hls-semantic-tokens-plugin/src/Ide/Plugin/SemanticTokens/Tokenize.hs +++ b/plugins/hls-semantic-tokens-plugin/src/Ide/Plugin/SemanticTokens/Tokenize.hs @@ -1,4 +1,3 @@ - {-# LANGUAGE NamedFieldPuns #-} {-# LANGUAGE OverloadedRecordDot #-} 
{-# LANGUAGE OverloadedStrings #-} @@ -12,11 +11,13 @@ import Control.Monad.State.Strict (MonadState (get), MonadTrans (lift), execStateT, modify, put) import Control.Monad.Trans.State.Strict (StateT) +import Data.Char (isAlpha) import qualified Data.Map.Strict as M import qualified Data.Map.Strict as Map import qualified Data.Set as S import Data.Text (Text) import qualified Data.Text as T +import Data.Text.Encoding (encodeUtf8) import qualified Data.Text.Rope as Char import Data.Text.Utf16.Rope (toText) import qualified Data.Text.Utf16.Rope as Utf16 @@ -24,6 +25,7 @@ import Data.Text.Utf16.Rope.Mixed (Rope) import qualified Data.Text.Utf16.Rope.Mixed as Rope import Development.IDE.GHC.Compat import Development.IDE.GHC.Error (realSrcSpanToCodePointRange) +import Development.IDE.Types.Exports (renderOcc) import Ide.Plugin.SemanticTokens.Types (RangeIdSetMap) import Language.LSP.Protocol.Types (Position (Position), Range (Range), UInt, mkRange) @@ -104,7 +106,7 @@ visitLeafIds leaf = liftMaybeM $ do ['$'] -> Just "$" -- other generated names that should not be visible '$' : _ -> Nothing - _c : ':' : _ -> Nothing + c : ':' : _ | isAlpha c -> Nothing ns -> Just ns case ranSplit of (NoSplit (tk, r)) -> do From c605b26ecb0f52dd6aa218e3575cf2be008076e6 Mon Sep 17 00:00:00 2001 From: Patrick Date: Mon, 29 Jan 2024 16:29:32 +0800 Subject: [PATCH 71/74] cleanup --- .../src/Ide/Plugin/SemanticTokens/Tokenize.hs | 2 -- 1 file changed, 2 deletions(-) diff --git a/plugins/hls-semantic-tokens-plugin/src/Ide/Plugin/SemanticTokens/Tokenize.hs b/plugins/hls-semantic-tokens-plugin/src/Ide/Plugin/SemanticTokens/Tokenize.hs index 5cceb33867..308e0e6959 100644 --- a/plugins/hls-semantic-tokens-plugin/src/Ide/Plugin/SemanticTokens/Tokenize.hs +++ b/plugins/hls-semantic-tokens-plugin/src/Ide/Plugin/SemanticTokens/Tokenize.hs @@ -17,7 +17,6 @@ import qualified Data.Map.Strict as Map import qualified Data.Set as S import Data.Text (Text) import qualified Data.Text as T -import Data.Text.Encoding (encodeUtf8) import qualified Data.Text.Rope as Char import Data.Text.Utf16.Rope (toText) import qualified Data.Text.Utf16.Rope as Utf16 @@ -25,7 +24,6 @@ import Data.Text.Utf16.Rope.Mixed (Rope) import qualified Data.Text.Utf16.Rope.Mixed as Rope import Development.IDE.GHC.Compat import Development.IDE.GHC.Error (realSrcSpanToCodePointRange) -import Development.IDE.Types.Exports (renderOcc) import Ide.Plugin.SemanticTokens.Types (RangeIdSetMap) import Language.LSP.Protocol.Types (Position (Position), Range (Range), UInt, mkRange) From fa8ca2ca3f9817852d835a813f859c44834ea17e Mon Sep 17 00:00:00 2001 From: Patrick Date: Mon, 29 Jan 2024 16:31:54 +0800 Subject: [PATCH 72/74] fix --- .../src/Ide/Plugin/SemanticTokens/Tokenize.hs | 7 +++---- 1 file changed, 3 insertions(+), 4 deletions(-) diff --git a/plugins/hls-semantic-tokens-plugin/src/Ide/Plugin/SemanticTokens/Tokenize.hs b/plugins/hls-semantic-tokens-plugin/src/Ide/Plugin/SemanticTokens/Tokenize.hs index 308e0e6959..d4c3882884 100644 --- a/plugins/hls-semantic-tokens-plugin/src/Ide/Plugin/SemanticTokens/Tokenize.hs +++ b/plugins/hls-semantic-tokens-plugin/src/Ide/Plugin/SemanticTokens/Tokenize.hs @@ -11,7 +11,7 @@ import Control.Monad.State.Strict (MonadState (get), MonadTrans (lift), execStateT, modify, put) import Control.Monad.Trans.State.Strict (StateT) -import Data.Char (isAlpha) +import Data.Char (isAlpha, isAlphaNum) import qualified Data.Map.Strict as M import qualified Data.Map.Strict as Map import qualified Data.Set as S @@ -101,10 +101,9 @@ visitLeafIds leaf 
= liftMaybeM $ do occStr <- lift $ T.pack <$> case (occNameString . nameOccName) name of -- the generated selector name with {-# LANGUAGE DuplicateRecordFields #-} '$' : 's' : 'e' : 'l' : ':' : xs -> Just $ takeWhile (/= ':') xs - ['$'] -> Just "$" -- other generated names that should not be visible - '$' : _ -> Nothing - c : ':' : _ | isAlpha c -> Nothing + '$' : c : _ | isAlphaNum c -> Nothing + c : ':' : _ | isAlphaNum c -> Nothing ns -> Just ns case ranSplit of (NoSplit (tk, r)) -> do From 73c9bbe37cd2b8c87b3ec8197374e4b0b8323fa0 Mon Sep 17 00:00:00 2001 From: Patrick Date: Mon, 29 Jan 2024 17:07:48 +0800 Subject: [PATCH 73/74] rename --- .../src/Ide/Plugin/SemanticTokens/Internal.hs | 7 +++---- 1 file changed, 3 insertions(+), 4 deletions(-) diff --git a/plugins/hls-semantic-tokens-plugin/src/Ide/Plugin/SemanticTokens/Internal.hs b/plugins/hls-semantic-tokens-plugin/src/Ide/Plugin/SemanticTokens/Internal.hs index 4c237a297c..881221bb04 100644 --- a/plugins/hls-semantic-tokens-plugin/src/Ide/Plugin/SemanticTokens/Internal.hs +++ b/plugins/hls-semantic-tokens-plugin/src/Ide/Plugin/SemanticTokens/Internal.hs @@ -110,17 +110,16 @@ getSemanticTokensRule recorder = let localSemanticMap = mkLocalIdSemanticFromAst names (hieKindFunMasksKind hieKind) refMap -- get imported name semantic map let importedIdSemanticMap = M.mapMaybe id - $ M.fromSet (getTypeExclude getTyThingMap) (names `S.difference` M.keysSet localSemanticMap) + $ M.fromSet (getTypeThing getTyThingMap) (names `S.difference` M.keysSet localSemanticMap) let sMap = M.unionWith (<>) importedIdSemanticMap localSemanticMap let rangeTokenType = extractSemanticTokensFromNames sMap spanIdMap return $ RangeHsSemanticTokenTypes rangeTokenType where - -- ignore one already in discovered in local - getTypeExclude :: + getTypeThing :: NameEnv TyThing -> Identifier -> Maybe HsSemanticTokenType - getTypeExclude tyThingMap n + getTypeThing tyThingMap n | (Right name) <- n = let tyThing = lookupNameEnv tyThingMap name in (tyThing >>= tyThingSemantic) From 9b717dbecef0ffcdfa8ec80f152bcaf53e8f194f Mon Sep 17 00:00:00 2001 From: Patrick Date: Mon, 29 Jan 2024 21:51:14 +0800 Subject: [PATCH 74/74] fix --- haskell-language-server.cabal | 52 ++++++++++++++++++----------------- 1 file changed, 27 insertions(+), 25 deletions(-) diff --git a/haskell-language-server.cabal b/haskell-language-server.cabal index ca3ff2030d..21b46482ca 100644 --- a/haskell-language-server.cabal +++ b/haskell-language-server.cabal @@ -140,7 +140,7 @@ flag cabal common cabal if flag(cabal) - build-depends: hls-cabal-plugin + build-depends: hls-cabal-plugin cpp-options: -Dhls_cabal library hls-cabal-plugin @@ -223,7 +223,7 @@ flag class common class if flag(class) - build-depends: hls-class-plugin + build-depends: hls-class-plugin cpp-options: -Dhls_class library hls-class-plugin @@ -287,7 +287,7 @@ flag callHierarchy common callHierarchy if flag(callHierarchy) - build-depends: hls-call-hierarchy-plugin + build-depends: hls-call-hierarchy-plugin cpp-options: -Dhls_callHierarchy library hls-call-hierarchy-plugin @@ -348,7 +348,7 @@ flag eval common eval if flag(eval) - build-depends: hls-eval-plugin + build-depends: hls-eval-plugin cpp-options: -Dhls_eval library hls-eval-plugin @@ -429,7 +429,7 @@ test-suite hls-eval-plugin-tests common importLens if flag(importLens) - build-depends: hls-explicit-imports-plugin + build-depends: hls-explicit-imports-plugin cpp-options: -Dhls_importLens flag importLens @@ -494,7 +494,7 @@ flag rename common rename if flag(rename) - build-depends: 
hls-rename-plugin + build-depends: hls-rename-plugin cpp-options: -Dhls_rename library hls-rename-plugin @@ -550,7 +550,7 @@ flag retrie common retrie if flag(retrie) - build-depends: hls-retrie-plugin + build-depends: hls-retrie-plugin cpp-options: -Dhls_retrie library hls-retrie-plugin @@ -615,7 +615,7 @@ flag hlint common hlint if flag(hlint) && (impl(ghc < 9.8.0) || flag(ignore-plugins-ghc-bounds)) - build-depends: hls-hlint-plugin + build-depends: hls-hlint-plugin cpp-options: -Dhls_hlint library hls-hlint-plugin @@ -695,7 +695,7 @@ flag stan common stan if flag(stan) && (impl(ghc > 8.8.1) && impl(ghc <= 9.2.3) || impl(ghc >= 9.4.0) && impl(ghc < 9.10.0)) - build-depends: hls-stan-plugin + build-depends: hls-stan-plugin cpp-options: -Dhls_stan library hls-stan-plugin @@ -769,7 +769,7 @@ flag moduleName common moduleName if flag(moduleName) - build-depends: hls-module-name-plugin + build-depends: hls-module-name-plugin cpp-options: -Dhls_moduleName library hls-module-name-plugin @@ -814,7 +814,7 @@ flag pragmas common pragmas if flag(pragmas) - build-depends: hls-pragmas-plugin + build-depends: hls-pragmas-plugin cpp-options: -Dhls_pragmas library hls-pragmas-plugin @@ -862,7 +862,7 @@ flag splice common splice if flag(splice) - build-depends: hls-splice-plugin + build-depends: hls-splice-plugin cpp-options: -Dhls_splice library hls-splice-plugin @@ -1040,7 +1040,7 @@ flag codeRange common codeRange if flag(codeRange) - build-depends: hls-code-range-plugin + build-depends: hls-code-range-plugin cpp-options: -Dhls_codeRange library hls-code-range-plugin @@ -1100,7 +1100,7 @@ flag changeTypeSignature common changeTypeSignature if flag(changeTypeSignature) - build-depends: hls-change-type-signature-plugin + build-depends: hls-change-type-signature-plugin cpp-options: -Dhls_changeTypeSignature library hls-change-type-signature-plugin @@ -1160,7 +1160,7 @@ flag gadt common gadt if flag(gadt) - build-depends: hls-gadt-plugin + build-depends: hls-gadt-plugin cpp-options: -Dhls_gadt library hls-gadt-plugin @@ -1213,7 +1213,7 @@ flag explicitFixity common explicitFixity if flag(explicitFixity) - build-depends: hls-explicit-fixity-plugin + build-depends: hls-explicit-fixity-plugin cpp-options: -DexplicitFixity library hls-explicit-fixity-plugin @@ -1260,7 +1260,7 @@ flag explicitFields common explicitFields if flag(explicitFields) - build-depends: hls-explicit-record-fields-plugin + build-depends: hls-explicit-record-fields-plugin cpp-options: -DexplicitFields library hls-explicit-record-fields-plugin @@ -1284,7 +1284,7 @@ library hls-explicit-record-fields-plugin if flag(pedantic) ghc-options: -Werror -Wwarn=incomplete-record-updates - + test-suite hls-explicit-record-fields-plugin-tests import: warnings default-language: Haskell2010 @@ -1309,7 +1309,7 @@ flag overloadedRecordDot common overloadedRecordDot if flag(overloadedRecordDot) - build-depends: hls-overloaded-record-dot-plugin + build-depends: hls-overloaded-record-dot-plugin cpp-options: -Dhls_overloaded_record_dot library hls-overloaded-record-dot-plugin @@ -1356,7 +1356,7 @@ flag floskell common floskell if flag(floskell) && (impl(ghc < 9.7) || flag(ignore-plugins-ghc-bounds)) - build-depends: hls-floskell-plugin + build-depends: hls-floskell-plugin cpp-options: -Dhls_floskell library hls-floskell-plugin @@ -1398,7 +1398,7 @@ flag fourmolu common fourmolu if flag(fourmolu) - build-depends: hls-fourmolu-plugin + build-depends: hls-fourmolu-plugin cpp-options: -Dhls_fourmolu library hls-fourmolu-plugin @@ -1451,7 +1451,7 @@ flag ormolu 
common ormolu if flag(ormolu) - build-depends: hls-ormolu-plugin + build-depends: hls-ormolu-plugin cpp-options: -Dhls_ormolu library hls-ormolu-plugin @@ -1504,7 +1504,7 @@ flag stylishHaskell common stylishHaskell if flag(stylishHaskell) && (impl(ghc < 9.8.0) || flag(ignore-plugins-ghc-bounds)) - build-depends: hls-stylish-haskell-plugin + build-depends: hls-stylish-haskell-plugin cpp-options: -Dhls_stylishHaskell library hls-stylish-haskell-plugin @@ -1549,7 +1549,7 @@ flag refactor common refactor if flag(refactor) - build-depends: hls-refactor-plugin + build-depends: hls-refactor-plugin cpp-options: -Dhls_refactor library hls-refactor-plugin @@ -1665,7 +1665,7 @@ flag semanticTokens common semanticTokens if flag(semanticTokens) - build-depends: hls-semantic-tokens-plugin + build-depends: hls-semantic-tokens-plugin cpp-options: -Dhls_semanticTokens library hls-semantic-tokens-plugin @@ -1679,6 +1679,7 @@ library hls-semantic-tokens-plugin Ide.Plugin.SemanticTokens.Query Ide.Plugin.SemanticTokens.SemanticConfig Ide.Plugin.SemanticTokens.Utils + Ide.Plugin.SemanticTokens.Tokenize Ide.Plugin.SemanticTokens.Internal hs-source-dirs: plugins/hls-semantic-tokens-plugin/src @@ -1688,6 +1689,7 @@ library hls-semantic-tokens-plugin , containers , extra , hiedb + , text-rope , mtl >= 2.2 , ghcide == 2.6.0.0 , hls-plugin-api == 2.6.0.0