"""Canonical semantic token legend and normalization utilities."""

from __future__ import annotations

from . import types

# Canonical token types (LSP standard order, then backend-specific)
CANONICAL_TOKEN_TYPES: list[str] = [
    # LSP standard (SemanticTokenTypes enum order)
    "namespace",  # 0
    "type",  # 1
    "class",  # 2
    "enum",  # 3
    "interface",  # 4
    "struct",  # 5
    "typeParameter",  # 6
    "parameter",  # 7
    "variable",  # 8
    "property",  # 9
    "enumMember",  # 10
    "event",  # 11
    "function",  # 12
    "method",  # 13
    "macro",  # 14
    "keyword",  # 15
    "modifier",  # 16
    "comment",  # 17
    "string",  # 18
    "number",  # 19
    "regexp",  # 20
    "operator",  # 21
    "decorator",  # 22
    "label",  # 23 (LSP standard)
    # Backend-specific (appended)
    "selfParameter",  # 24 (pyright, ty)
    "clsParameter",  # 25 (pyright, ty)
    "builtinConstant",  # 26 (ty)
]

# Canonical token modifiers (LSP standard order, then backend-specific)
CANONICAL_TOKEN_MODIFIERS: list[str] = [
    # LSP standard (SemanticTokenModifiers enum order)
    "declaration",  # bit 0
    "definition",  # bit 1
    "readonly",  # bit 2
    "static",  # bit 3
    "deprecated",  # bit 4
    "abstract",  # bit 5
    "async",  # bit 6
    "modification",  # bit 7
    "documentation",  # bit 8
    "defaultLibrary",  # bit 9
    # Backend-specific (appended)
    "builtin",  # bit 10 (pyright)
    "classMember",  # bit 11 (pyright)
    "parameter",  # bit 12 (pyright - modifier, not to be confused with type)
]

# The canonical legend for Monaco/editor integration
CANONICAL_LEGEND: types.SemanticTokensLegend = {
    "tokenTypes": CANONICAL_TOKEN_TYPES,
    "tokenModifiers": CANONICAL_TOKEN_MODIFIERS,
}

# Build lookup tables for canonical indices
_CANONICAL_TYPE_INDEX: dict[str, int] = {
    name: idx for idx, name in enumerate(CANONICAL_TOKEN_TYPES)
}
_CANONICAL_MODIFIER_INDEX: dict[str, int] = {
    name: idx for idx, name in enumerate(CANONICAL_TOKEN_MODIFIERS)
}

# Pyrefly legend (server doesn't advertise it via LSP)
# Source: https://github.com/facebook/pyrefly/blob/main/pyrefly/lib/state/semantic_tokens.rs
PYREFLY_LEGEND: types.SemanticTokensLegend = {
    "tokenTypes": [
        "namespace",  # 0
        "type",  # 1
        "class",  # 2
        "enum",  # 3
        "interface",  # 4
        "struct",  # 5
        "typeParameter",  # 6
        "parameter",  # 7
        "variable",  # 8
        "property",  # 9
        "enumMember",  # 10
        "event",  # 11
        "function",  # 12
        "method",  # 13
        "macro",  # 14
        "keyword",  # 15
        "modifier",  # 16
        "comment",  # 17
        "string",  # 18
        "number",  # 19
        "regexp",  # 20
        "operator",  # 21
        "decorator",  # 22
    ],
    "tokenModifiers": [
        "declaration",  # bit 0
        "definition",  # bit 1
        "readonly",  # bit 2
        "static",  # bit 3
        "deprecated",  # bit 4
        "abstract",  # bit 5
        "async",  # bit 6
        "modification",  # bit 7
        "documentation",  # bit 8
        "defaultLibrary",  # bit 9
    ],
}


def build_type_mapping(backend_legend: types.SemanticTokensLegend) -> dict[int, int]:
    """Build mapping from backend token type indices to canonical indices."""
    mapping: dict[int, int] = {}
    for backend_idx, type_name in enumerate(backend_legend["tokenTypes"]):
        canonical_idx = _CANONICAL_TYPE_INDEX.get(type_name, -1)
        mapping[backend_idx] = canonical_idx
    return mapping


def build_modifier_mapping(
    backend_legend: types.SemanticTokensLegend,
) -> dict[int, int]:
    """Build mapping from backend modifier bit positions to canonical positions."""
    mapping: dict[int, int] = {}
    for backend_bit, modifier_name in enumerate(backend_legend["tokenModifiers"]):
        canonical_bit = _CANONICAL_MODIFIER_INDEX.get(modifier_name, -1)
        mapping[backend_bit] = canonical_bit
    return mapping


def normalize_tokens(
    tokens: types.SemanticTokens,
    type_map: dict[int, int],
    modifier_map: dict[int, int],
) -> types.SemanticTokens:
    """Remap token indices to use the canonical legend."""
    data = tokens.get("data", [])
    if not data:
        return tokens

    # Each token is 5 integers: deltaLine, deltaStart, length, typeIndex, modifiers
    normalized_data: list[int] = []

    for i in range(0, len(data), 5):
        if i + 4 >= len(data):
            break  # Incomplete token data

        delta_line = data[i]
        delta_start = data[i + 1]
        length = data[i + 2]
        type_index = data[i + 3]
        modifier_bits = data[i + 4]

        # Remap token type index
        canonical_type = type_map.get(type_index, type_index)
        if canonical_type == -1:
            canonical_type = type_index  # Keep original if unknown

        # Remap modifier bitmask
        canonical_modifiers = 0
        for backend_bit, canonical_bit in modifier_map.items():
            if modifier_bits & (1 << backend_bit):
                if canonical_bit >= 0:
                    canonical_modifiers |= 1 << canonical_bit

        normalized_data.extend(
            [
                delta_line,
                delta_start,
                length,
                canonical_type,
                canonical_modifiers,
            ]
        )

    result: types.SemanticTokens = {"data": normalized_data}
    if "resultId" in tokens:
        result["resultId"] = tokens["resultId"]

    return result
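

# Minimal usage sketch (illustrative only): the backend legend and token data
# below are invented for demonstration. Real legends come from the backend's
# advertised capabilities (or PYREFLY_LEGEND above), and real token data from a
# textDocument/semanticTokens/full response.
if __name__ == "__main__":
    backend_legend: types.SemanticTokensLegend = {
        "tokenTypes": ["function", "variable", "class"],
        "tokenModifiers": ["definition", "readonly"],
    }
    type_map = build_type_mapping(backend_legend)
    modifier_map = build_modifier_mapping(backend_legend)

    # Two tokens, 5 integers each: deltaLine, deltaStart, length, typeIndex, modifiers.
    # Token 1: backend type 0 ("function") with modifier bit 0 ("definition").
    # Token 2: backend type 1 ("variable") with modifier bit 1 ("readonly").
    raw: types.SemanticTokens = {"data": [0, 0, 3, 0, 0b01, 1, 4, 5, 1, 0b10]}

    normalized = normalize_tokens(raw, type_map, modifier_map)
    # "function" -> canonical type 12, "definition" -> canonical bit 1,
    # "variable" -> canonical type 8, "readonly" -> canonical bit 2.
    print(normalized["data"])  # [0, 0, 3, 12, 2, 1, 4, 5, 8, 4]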