diff --git a/ra-wasm/src/lib.rs b/ra-wasm/src/lib.rs
index 645561d..aa6e568 100644
--- a/ra-wasm/src/lib.rs
+++ b/ra-wasm/src/lib.rs
@@ -16,14 +16,13 @@ use ide_db::{
         SnippetCap,
     },
 };
+use return_types::*;
 use wasm_bindgen::prelude::*;
-
-mod to_proto;
+pub use wasm_bindgen_rayon::init_thread_pool;
 
 mod return_types;
-use return_types::*;
-
-pub use wasm_bindgen_rayon::init_thread_pool;
+mod semantic_tokens;
+mod to_proto;
 
 #[wasm_bindgen(start)]
 pub fn start() {
@@ -139,17 +138,6 @@ impl WorldState {
 
         let line_index = self.analysis().file_line_index(self.file_id).unwrap();
 
-        let highlights: Vec<_> = self
-            .analysis()
-            .highlight(file_id)
-            .unwrap()
-            .into_iter()
-            .map(|hl| Highlight {
-                tag: Some(hl.highlight.tag.to_string()),
-                range: to_proto::text_range(hl.range, &line_index),
-            })
-            .collect();
-
         let config = DiagnosticsConfig::default();
 
         let diagnostics: Vec<_> = self
@@ -171,7 +159,19 @@ impl WorldState {
             })
             .collect();
 
-        serde_wasm_bindgen::to_value(&UpdateResult { diagnostics, highlights }).unwrap()
+        serde_wasm_bindgen::to_value(&UpdateResult { diagnostics }).unwrap()
+    }
+
+    pub fn semantic_tokens(&self) -> Vec<u32> {
+        log::warn!("semantic_tokens");
+        // let mut builder = SemanticTokensBuilder::new();
+        let line_index = self.analysis().file_line_index(self.file_id).unwrap();
+        let file_text = self.analysis().file_text(self.file_id).unwrap();
+        to_proto::semantic_tokens(
+            &file_text,
+            &line_index,
+            self.analysis().highlight(self.file_id).unwrap(),
+        )
     }
 
     pub fn inlay_hints(&self) -> JsValue {
diff --git a/ra-wasm/src/return_types.rs b/ra-wasm/src/return_types.rs
index a712ab4..9ccce1b 100644
--- a/ra-wasm/src/return_types.rs
+++ b/ra-wasm/src/return_types.rs
@@ -62,7 +62,6 @@ pub struct TextEdit {
 #[derive(Serialize)]
 pub struct UpdateResult {
     pub diagnostics: Vec<Diagnostic>,
-    pub highlights: Vec<Highlight>,
 }
 
 #[derive(Serialize)]
diff --git a/ra-wasm/src/semantic_tokens.rs b/ra-wasm/src/semantic_tokens.rs
new file mode 100644
index 0000000..e00ec8e
--- /dev/null
+++ b/ra-wasm/src/semantic_tokens.rs
@@ -0,0 +1,133 @@
+//! Semantic Tokens helpers
+#![allow(non_camel_case_types)]
+
+use std::ops;
+
+use crate::return_types;
+
+#[repr(u8)]
+#[allow(dead_code)]
+pub(crate) enum SemanticTokenType {
+    COMMENT,
+    STRING,
+    KEYWORD,
+    NUMBER,
+    REGEXP,
+    OPERATOR,
+    NAMESPACE,
+    TYPE,
+    STRUCT,
+    CLASS,
+    INTERFACE,
+    ENUM,
+    TYPE_PARAMETER,
+    FUNCTION,
+    MEMBER,
+    MACRO,
+    VARIABLE,
+    PARAMETER,
+    PROPERTY,
+    LABEL,
+    UNSUPPORTED,
+}
+
+macro_rules! define_semantic_token_modifiers {
+    ($($ident:ident),*$(,)?) => {
+        #[derive(PartialEq)]
+        pub(crate) enum SemanticTokenModifier {
+            $($ident),*
+        }
+
+        pub(crate) const SUPPORTED_MODIFIERS: &[SemanticTokenModifier] = &[
+            $(SemanticTokenModifier::$ident),*
+        ];
+    };
+}
+
+define_semantic_token_modifiers![
+    DOCUMENTATION,
+    DECLARATION,
+    DEFINITION,
+    STATIC,
+    ABSTRACT,
+    DEPRECATED,
+    READONLY,
+    DEFAULT_LIBRARY,
+    // custom
+    ASYNC,
+    ATTRIBUTE_MODIFIER,
+    CALLABLE,
+    CONSTANT,
+    CONSUMING,
+    CONTROL_FLOW,
+    CRATE_ROOT,
+    INJECTED,
+    INTRA_DOC_LINK,
+    LIBRARY,
+    MUTABLE,
+    PUBLIC,
+    REFERENCE,
+    TRAIT_MODIFIER,
+    UNSAFE,
+];
+
+#[derive(Default)]
+pub(crate) struct ModifierSet(pub(crate) u32);
+
+impl ops::BitOrAssign<SemanticTokenModifier> for ModifierSet {
+    fn bitor_assign(&mut self, rhs: SemanticTokenModifier) {
+        let idx = SUPPORTED_MODIFIERS.iter().position(|it| it == &rhs).unwrap();
+        self.0 |= 1 << idx;
+    }
+}
+
+/// Tokens are encoded relative to each other.
+///
+/// This is a direct port of
+pub(crate) struct SemanticTokensBuilder {
+    prev_line: u32,
+    prev_char: u32,
+    data: Vec<u32>,
+}
+
+impl SemanticTokensBuilder {
+    pub(crate) fn new() -> Self {
+        SemanticTokensBuilder { prev_line: 0, prev_char: 0, data: Vec::new() }
+    }
+
+    /// Push a new token onto the builder
+    pub(crate) fn push(
+        &mut self,
+        range: return_types::Range,
+        token_index: u32,
+        modifier_bitset: u32,
+    ) {
+        let mut push_line = range.startLineNumber - 1;
+        let mut push_char = range.startColumn - 1;
+
+        if !self.data.is_empty() {
+            push_line -= self.prev_line;
+            if push_line == 0 {
+                push_char -= self.prev_char;
+            }
+        }
+
+        // A token cannot be multiline
+        let token_len = range.endColumn - range.startColumn;
+
+        let token = [push_line, push_char, token_len, token_index, modifier_bitset];
+
+        self.data.extend_from_slice(&token);
+
+        self.prev_line = range.startLineNumber - 1;
+        self.prev_char = range.startColumn - 1;
+    }
+
+    pub(crate) fn build(self) -> Vec<u32> {
+        self.data
+    }
+}
+
+pub(crate) fn type_index(ty: SemanticTokenType) -> u32 {
+    ty as u32
+}
diff --git a/ra-wasm/src/to_proto.rs b/ra-wasm/src/to_proto.rs
index 1c8c7cd..9b19921 100644
--- a/ra-wasm/src/to_proto.rs
+++ b/ra-wasm/src/to_proto.rs
@@ -1,5 +1,5 @@
 //! Conversion of rust-analyzer specific types to return_types equivalents.
-use crate::return_types;
+use crate::{return_types, semantic_tokens};
 
 pub(crate) fn text_range(
     range: ide::TextRange,
@@ -229,3 +229,123 @@ fn markdown_string(s: &str) -> return_types::MarkdownString {
 
     return_types::MarkdownString { value: processed_lines.join("\n") }
 }
+
+pub(crate) type SemanticTokens = Vec<u32>;
+
+pub(crate) fn semantic_tokens(
+    text: &str,
+    line_index: &ide::LineIndex,
+    highlights: Vec<ide::HlRange>,
+) -> SemanticTokens {
+    let mut builder = semantic_tokens::SemanticTokensBuilder::new();
+
+    for highlight_range in highlights {
+        if highlight_range.highlight.is_empty() {
+            continue;
+        }
+        let (ty, mods) = semantic_token_type_and_modifiers(highlight_range.highlight);
+        let token_index = semantic_tokens::type_index(ty);
+        let modifier_bitset = mods.0;
+
+        for mut text_range in line_index.lines(highlight_range.range) {
+            if text[text_range].ends_with('\n') {
+                text_range = ide::TextRange::new(
+                    text_range.start(),
+                    text_range.end() - ide::TextSize::of('\n'),
+                );
+            }
+            let range = self::text_range(text_range, line_index);
+
+            builder.push(range, token_index, modifier_bitset);
+        }
+    }
+
+    builder.build()
+}
+
+fn semantic_token_type_and_modifiers(
+    highlight: ide::Highlight,
+) -> (semantic_tokens::SemanticTokenType, semantic_tokens::ModifierSet) {
+    use ide::{HlMod, HlTag, SymbolKind};
+    use semantic_tokens::*;
+    let mut mods = ModifierSet::default();
+    let type_ = match highlight.tag {
+        HlTag::Symbol(symbol) => match symbol {
+            SymbolKind::Module => SemanticTokenType::NAMESPACE,
+            SymbolKind::Impl => SemanticTokenType::TYPE,
+            SymbolKind::Field => SemanticTokenType::PROPERTY,
+            SymbolKind::TypeParam => SemanticTokenType::TYPE_PARAMETER,
+            SymbolKind::ConstParam => SemanticTokenType::PARAMETER,
+            SymbolKind::LifetimeParam => SemanticTokenType::TYPE_PARAMETER,
+            SymbolKind::Label => SemanticTokenType::LABEL,
+            SymbolKind::ValueParam => SemanticTokenType::PARAMETER,
+            SymbolKind::SelfParam => SemanticTokenType::KEYWORD,
+            SymbolKind::Local => SemanticTokenType::VARIABLE,
+            SymbolKind::Function => {
+                if highlight.mods.contains(HlMod::Associated) {
+                    SemanticTokenType::MEMBER
+                } else {
+                    SemanticTokenType::FUNCTION
+                }
+            }
+            SymbolKind::Const => {
+                mods |= SemanticTokenModifier::CONSTANT;
+                mods |= SemanticTokenModifier::STATIC;
+                SemanticTokenType::VARIABLE
+            }
+            SymbolKind::Static => {
+                mods |= SemanticTokenModifier::STATIC;
+                SemanticTokenType::VARIABLE
+            }
+            SymbolKind::Struct => SemanticTokenType::TYPE,
+            SymbolKind::Enum => SemanticTokenType::TYPE,
+            SymbolKind::Variant => SemanticTokenType::MEMBER,
+            SymbolKind::Union => SemanticTokenType::TYPE,
+            SymbolKind::TypeAlias => SemanticTokenType::TYPE,
+            SymbolKind::Trait => SemanticTokenType::INTERFACE,
+            SymbolKind::Macro => SemanticTokenType::MACRO,
+        },
+        HlTag::Attribute => SemanticTokenType::UNSUPPORTED,
+        HlTag::BoolLiteral => SemanticTokenType::NUMBER,
+        HlTag::BuiltinAttr => SemanticTokenType::UNSUPPORTED,
+        HlTag::BuiltinType => SemanticTokenType::TYPE,
+        HlTag::ByteLiteral | HlTag::NumericLiteral => SemanticTokenType::NUMBER,
+        HlTag::CharLiteral => SemanticTokenType::STRING,
+        HlTag::Comment => SemanticTokenType::COMMENT,
+        HlTag::EscapeSequence => SemanticTokenType::NUMBER,
+        HlTag::FormatSpecifier => SemanticTokenType::MACRO,
+        HlTag::Keyword => SemanticTokenType::KEYWORD,
+        HlTag::None => SemanticTokenType::UNSUPPORTED,
+        HlTag::Operator(_op) => SemanticTokenType::OPERATOR,
+        HlTag::StringLiteral => SemanticTokenType::STRING,
+        HlTag::UnresolvedReference => SemanticTokenType::UNSUPPORTED,
+        HlTag::Punctuation(_punct) => SemanticTokenType::OPERATOR,
+    };
+
+    for modifier in highlight.mods.iter() {
+        let modifier = match modifier {
+            HlMod::Associated => continue,
+            HlMod::Async => SemanticTokenModifier::ASYNC,
+            HlMod::Attribute => SemanticTokenModifier::ATTRIBUTE_MODIFIER,
+            HlMod::Callable => SemanticTokenModifier::CALLABLE,
+            HlMod::Consuming => SemanticTokenModifier::CONSUMING,
+            HlMod::ControlFlow => SemanticTokenModifier::CONTROL_FLOW,
+            HlMod::CrateRoot => SemanticTokenModifier::CRATE_ROOT,
+            HlMod::DefaultLibrary => SemanticTokenModifier::DEFAULT_LIBRARY,
+            HlMod::Definition => SemanticTokenModifier::DECLARATION,
+            HlMod::Documentation => SemanticTokenModifier::DOCUMENTATION,
+            HlMod::Injected => SemanticTokenModifier::INJECTED,
+            HlMod::IntraDocLink => SemanticTokenModifier::INTRA_DOC_LINK,
+            HlMod::Library => SemanticTokenModifier::LIBRARY,
+            HlMod::Mutable => SemanticTokenModifier::MUTABLE,
+            HlMod::Public => SemanticTokenModifier::PUBLIC,
+            HlMod::Reference => SemanticTokenModifier::REFERENCE,
+            HlMod::Static => SemanticTokenModifier::STATIC,
+            HlMod::Trait => SemanticTokenModifier::TRAIT_MODIFIER,
+            HlMod::Unsafe => SemanticTokenModifier::UNSAFE,
+        };
+        mods |= modifier;
+    }
+
+    (type_, mods)
+}
diff --git a/www/example-code.rs b/www/example-code.rs
index b43604d..8093dc3 100644
--- a/www/example-code.rs
+++ b/www/example-code.rs
@@ -1,9 +1,26 @@
 use std::ops::Range;
 
-fn gav(x: i32, y: i32) -> i64 {
+unsafe fn gav(x: i32, y: i32) -> i64 {
     (x - y) * (x + y)
 }
 
+#[derive(Debug)]
+struct Gen<'a, G> {
+    g: &'a G
+}
+
+impl<'a, G> Gen<'a, G> {
+    /// Create a new `Gen`
+    /// ```
+    /// let mut gen = Gen::new(&mut something);
+    /// ```
+    fn new(g: &mut G) -> Self {
+        Gen { g }
+    }
+
+    fn do(&mut self) -> () { }
+}
+
 fn main() {
     let num = 5;
     let a = vec![1, 2, 3];
@@ -11,16 +28,20 @@ fn main() {
     let c = None;
     let d = Range { start: 1, end: num };
     let e = 1..num;
-    let f = "sssss".to_string();
+    let mut f = "sssss".to_string();
+    let x = &mut f;
     for a in d {
         for b in e {
-            let c = gav(gav(a, b), a);
+            let c = unsafe { gav(gav(a, b), a) };
             assert_eq!(gav(a, b), a * a - b * b);
         }
     }
+
+    let mut gen = Gen::new(&mut f);
     let f = d
         .reduce(|a, b| {
-            println!("{}", a);
+            gen.do();
+            println!("value: {}", a);
             a * b
         })
         .unwrap();
diff --git a/www/index.js b/www/index.js
index 42654c1..a1b3cec 100644
--- a/www/index.js
+++ b/www/index.js
@@ -55,13 +55,12 @@ import * as monaco from 'monaco-editor/esm/vs/editor/editor.api';
 import exampleCode from './example-code.rs';
 import './index.css';
 
-import { conf, grammar } from './rust-grammar';
+import { conf, grammar, semanticTokensLegend } from './rust-grammar';
 import fake_std from './fake_std.rs';
 import fake_core from './fake_core.rs';
 import fake_alloc from './fake_alloc.rs';
 
 var state;
-var allTokens;
 
 self.MonacoEnvironment = {
     getWorkerUrl: () => './editor.worker.bundle.js',
@@ -223,48 +222,16 @@ const registerRA = async () => {
             return await state.folding_ranges();
         }
     });
-
-    class TokenState {
-        constructor(line = 0) {
-            this.line = line;
-            this.equals = () => true;
-        }
-
-        clone() {
-            const res = new TokenState(this.line);
-            res.line += 1;
-            return res;
-        }
-    }
-
-    function fixTag(tag) {
-        switch (tag) {
-            case 'builtin': return 'variable.predefined';
-            case 'attribute': return 'key';
-            case 'macro': return 'number.hex';
-            case 'literal': return 'number';
-            default: return tag;
-        }
-    }
-
-    /*monaco.languages.setTokensProvider(modeId, {
-        getInitialState: () => new TokenState(),
-        tokenize(_, st) {
-            const filteredTokens = allTokens
-                .filter((token) => token.range.startLineNumber === st.line);
-
-            const tokens = filteredTokens.map((token) => ({
-                startIndex: token.range.startColumn - 1,
-                scopes: fixTag(token.tag),
-            }));
-            tokens.sort((a, b) => a.startIndex - b.startIndex);
-
-            return {
-                tokens,
-                endState: new TokenState(st.line + 1),
-            };
+    monaco.languages.registerDocumentSemanticTokensProvider(modeId, {
+        getLegend() {
+            return semanticTokensLegend;
+        },
+        async provideDocumentSemanticTokens(model, lastResultId, token) {
+            let res = await state.semantic_tokens();
+            return { data: res };
         },
-    });*/
+        releaseDocumentSemanticTokens(resultId) {},
+    });
 
 };
 
@@ -318,8 +285,8 @@ const start = async () => {
 
     var loadingText = document.createTextNode("Loading wasm...");
-    document.body.appendChild(loadingText); 
-    
+    document.body.appendChild(loadingText);
+
     let model = monaco.editor.createModel(exampleCode, modeId);
     window.editor = monaco.editor;
    state = null; //await createRA();
@@ -327,20 +294,25 @@ const start = async () => {
    async function update() {
        const res = await state.update(model.getValue());
        monaco.editor.setModelMarkers(model, modeId, res.diagnostics);
-        allTokens = res.highlights;
    }
 
    monaco.editor.defineTheme('vscode-dark-plus', {
-        base: 'vs-dark', 
+        base: 'vs-dark',
        inherit: true,
        colors: {
            'editorInlayHint.foreground': '#A0A0A0F0',
            'editorInlayHint.background': '#11223300',
        },
        rules: [
-            { token: 'keyword.control', foreground: 'C586C0' }, 
-            { token: 'variable', foreground: '9CDCFE' }, 
-            { token: 'support.function', foreground: 'DCDCAA' }, 
+            { token: 'keyword.control', foreground: 'C586C0' },
+            { token: 'variable', foreground: '9CDCFE' },
+            { token: 'support.function', foreground: 'DCDCAA' },
+            { token: 'function', foreground: 'DCDCAA' },
+            { token: 'member', foreground: 'DCDCAA' },
+            { token: 'macro', foreground: '569CD6' },
+            { token: 'typeParameter', foreground: '569CD6' },
+            { token: 'variable.mutable', fontStyle: 'underline' },
+            { token: 'parameter.mutable', fontStyle: 'underline' },
        ],
    });
    document.body.removeChild(loadingText);
@@ -354,7 +326,8 @@ const start = async () => {
    initRA();
    const myEditor = monaco.editor.create(document.body, {
        theme: 'vscode-dark-plus',
-        model: model
+        model: model,
+        'semanticHighlighting.enabled': true,
    });
 
    window.onresize = () => myEditor.layout();
diff --git a/www/rust-grammar.js b/www/rust-grammar.js
index c0bd5be..5755a12 100644
--- a/www/rust-grammar.js
+++ b/www/rust-grammar.js
@@ -44,7 +44,7 @@ export const grammar = {
        'continue', 'else', 'for', 'if', 'while', 'loop', 'match',
    ],
 
-    typeKeywords: [ 
+    typeKeywords: [
        'Self', 'm32', 'm64', 'm128', 'f80', 'f16', 'f128', 'int', 'uint', 'float', 'char', 'bool',
        'u8', 'u16', 'u32', 'u64', 'f32', 'f64', 'i8', 'i16', 'i32', 'i64', 'str', 'Option', 'Either',
        'c_float', 'c_double', 'c_void', 'FILE', 'fpos_t', 'DIR', 'dirent',
@@ -140,3 +140,54 @@ export const grammar = {
        ],
    },
 };
+
+export const semanticTokensLegend = {
+    tokenTypes: [
+        'comment',
+        'string',
+        'keyword',
+        'number',
+        'regexp',
+        'operator',
+        'namespace',
+        'type',
+        'struct',
+        'class',
+        'interface',
+        'enum',
+        'typeParameter',
+        'function',
+        'member',
+        'macro',
+        'variable',
+        'parameter',
+        'property',
+        'label',
+        'unsupported'
+    ],
+    tokenModifiers: [
+        'documentation',
+        'declaration',
+        'definition',
+        'static',
+        'abstract',
+        'deprecated',
+        'readonly',
+        'default_library',
+        'async',
+        'attribute',
+        'callable',
+        'constant',
+        'consuming',
+        'controlFlow',
+        'crateRoot',
+        'injected',
+        'intraDocLink',
+        'library',
+        'mutable',
+        'public',
+        'reference',
+        'trait',
+        'unsafe',
+    ],
+};
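Note (not part of the patch): the `Vec<u32>` returned by `WorldState::semantic_tokens` and handed to Monaco is a flat stream of 5-value groups, `[deltaLine, deltaStart, length, tokenType, tokenModifiers]`, delta-encoded against the previous token exactly as `SemanticTokensBuilder::push` does above. The sketch below is a minimal, self-contained reproduction of that encoding, assuming the 1-based line/column convention of `return_types::Range` and the type/modifier indices of the legend added in `rust-grammar.js` (e.g. 2 = keyword, 13 = function, modifier bit 1 = declaration); it is an illustration, not the patch's code.

```rust
// Standalone sketch of the delta encoding used by the semantic-tokens builder.
struct Builder {
    prev_line: u32,
    prev_char: u32,
    data: Vec<u32>,
}

impl Builder {
    fn new() -> Self {
        Builder { prev_line: 0, prev_char: 0, data: Vec::new() }
    }

    // Push one single-line token given 1-based (line, start_col, end_col).
    fn push(&mut self, line: u32, start_col: u32, end_col: u32, ty: u32, mods: u32) {
        let mut delta_line = line - 1;
        let mut delta_start = start_col - 1;
        if !self.data.is_empty() {
            delta_line -= self.prev_line;
            if delta_line == 0 {
                // Same line as the previous token: store only the column delta.
                delta_start -= self.prev_char;
            }
        }
        // Each token occupies five u32s in the output stream.
        self.data
            .extend_from_slice(&[delta_line, delta_start, end_col - start_col, ty, mods]);
        self.prev_line = line - 1;
        self.prev_char = start_col - 1;
    }
}

fn main() {
    let mut b = Builder::new();
    // `fn` on line 1, cols 1..3: token type 2 (keyword), no modifiers.
    b.push(1, 1, 3, 2, 0);
    // `main` on line 1, cols 4..8: token type 13 (function), declaration modifier (bit 1).
    b.push(1, 4, 8, 13, 1 << 1);
    // Second token is on the same line, so only the column delta (3) is stored.
    assert_eq!(b.data, vec![0, 0, 2, 2, 0, 0, 3, 4, 13, 2]);
    println!("{:?}", b.data);
}
```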