1
use std::sync::Arc;
2
use std::sync::RwLock;
3

            
4
use conjure_cp_core::context::Context;
5
use conjure_cp_essence_parser::RecoverableParseError;
6
use conjure_cp_essence_parser::diagnostics::diagnostics_api::Diagnostic;
7
use conjure_cp_essence_parser::diagnostics::error_detection::collect_errors::error_to_diagnostic;
8
use conjure_cp_essence_parser::diagnostics::source_map::SourceMap;
9
use conjure_cp_essence_parser::parse_essence_with_context_and_map;
10
use conjure_cp_essence_parser::util::get_tree;
11
use tower_lsp::{lsp_types::Diagnostic as LspDiagnostic, lsp_types::*};
12

            
13
use conjure_cp_essence_parser::diagnostics::diagnostics_api::Diagnostic as ParserDiagnostic;
14
use conjure_cp_essence_parser::diagnostics::diagnostics_api::Position as ParserPosition;
15
use conjure_cp_essence_parser::diagnostics::diagnostics_api::Range as ParserRange;
16
use conjure_cp_essence_parser::diagnostics::diagnostics_api::Severity as ParserSeverity;
17
use tower_lsp::lsp_types::Position as LspPosition;
18
use tower_lsp::lsp_types::Range as LspRange;
19

            
20
use tree_sitter::Point;
21
use tree_sitter::Tree;
22

            
23
use crate::handlers::cache::CacheCont;
24
use crate::server::Backend;
25

            
26
/// Delay (in milliseconds) between a document change and the debounced
/// re-parse that refreshes diagnostics; coalesces rapid successive edits
/// into a single parse instead of parsing on every keystroke.
const PARSE_DEBOUNCE_MS: u64 = 120;
29

            
30
impl Backend {
31
    pub async fn handle_did_open(&self, params: DidOpenTextDocumentParams) {
32
        //on open, check whether cache has existing entry, if not, add to cache
33

            
34
        let uri = params.text_document.uri;
35
        let text = params.text_document.text.clone();
36

            
37
        let lsp_cache = &self.lsp_cache;
38
        //basically look to see if in cache and if not in cache, fetch from source
39
        //the closure? only runs on a cache miss
40
        let cache_content = lsp_cache
41
            .get_with(uri.clone(), async {
42
                let (cst_tree, _) = get_tree(&text).unwrap();
43

            
44
                let context = Arc::new(RwLock::new(Context::default()));
45
                let mut errors: Vec<RecoverableParseError> = Vec::new();
46

            
47
                let parsed = parse_essence_with_context_and_map(
48
                    &text,
49
                    context,
50
                    &mut errors,
51
                    Some(&cst_tree),
52
                );
53

            
54
                match parsed {
55
                    Ok((Some(ast_model), source_map)) => CacheCont {
56
                        sourcemap: Some(source_map),
57
                        ast: Some(ast_model),
58
                        errors,
59
                        cst: Some(cst_tree),
60
                        contents: text.clone(),
61
                        version: params.text_document.version,
62
                    },
63
                    Ok((None, source_map)) => CacheCont {
64
                        sourcemap: Some(source_map),
65
                        ast: None,
66
                        errors,
67
                        cst: Some(cst_tree),
68
                        contents: text.clone(),
69
                        version: params.text_document.version,
70
                    },
71
                    Err(fatal) => CacheCont {
72
                        sourcemap: None,
73
                        ast: None,
74
                        errors: vec![RecoverableParseError::new(fatal.to_string(), None)],
75
                        cst: Some(cst_tree),
76
                        contents: text.clone(),
77
                        version: params.text_document.version,
78
                    },
79
                } //this inserts the cache created above into the cache
80
            })
81
            .await;
82

            
83
        self.client
84
            .log_message(MessageType::INFO, "Did open document")
85
            .await;
86

            
87
        //diagnostic stuff here
88
        publish_diagnostics(&self.client, &uri.clone(), cache_content).await;
89
    }
90
    pub async fn handle_did_save(&self, params: DidSaveTextDocumentParams) {
91
        // Diagnostics are driven by did_change. Re-publishing cached diagnostics on save can
92
        // race with in-flight did_change parsing and temporarily re-show stale diagnostics.
93
        let uri = params.text_document.uri;
94
        let _ = uri; // keep param usage explicit for now
95
        self.client
96
            .log_message(MessageType::INFO, "Did save document")
97
            .await;
98
    }
99
    pub async fn handle_did_change(&self, params: DidChangeTextDocumentParams) {
100
        let uri = params.text_document.uri;
101
        let incoming_version = params.text_document.version;
102
        let lsp_cache = &self.lsp_cache;
103

            
104
        self.client
105
            .log_message(MessageType::INFO, "in document change")
106
            .await;
107

            
108
        let Some(cache_conts) = lsp_cache.get(&uri).await else {
109
            self.client
110
                .log_message(MessageType::WARNING, "DidChange for uncached document")
111
                .await;
112
            return;
113
        };
114

            
115
        // Drop stale/out-of-order changes.
116
        if incoming_version <= cache_conts.version {
117
            return;
118
        }
119

            
120
        let mut new_text = cache_conts.contents.clone();
121
        let mut edited_tree = cache_conts.cst.clone();
122
        let mut provisional_sourcemap = cache_conts.sourcemap.clone();
123

            
124
        // LSP may send multiple incremental edits in one notification.
125
        for change in &params.content_changes {
126
            if let Some(lsp_range) = change.range {
127
                let start_byte = position_to_byte(&new_text, lsp_range.start);
128
                let old_end_byte = position_to_byte(&new_text, lsp_range.end);
129

            
130
                if start_byte > old_end_byte || old_end_byte > new_text.len() {
131
                    self.client
132
                        .log_message(
133
                            MessageType::WARNING,
134
                            "Ignoring invalid edit range in DidChange",
135
                        )
136
                        .await;
137
                    continue;
138
                }
139

            
140
                let start_position = position_to_treesitter_point(&new_text, lsp_range.start);
141
                let old_end_position = position_to_treesitter_point(&new_text, lsp_range.end);
142
                let new_end_position = calculate_new_end_position(&change.text, start_position);
143
                let new_end_byte = start_byte + change.text.len();
144

            
145
                if let Some(tree) = edited_tree.as_mut() {
146
                    tree.edit(&tree_sitter::InputEdit {
147
                        start_byte,
148
                        old_end_byte,
149
                        new_end_byte,
150
                        start_position,
151
                        old_end_position,
152
                        new_end_position,
153
                    });
154
                }
155

            
156
                if let Some(map) = provisional_sourcemap.as_mut() {
157
                    shift_sourcemap_after_edit(map, start_byte, old_end_byte, new_end_byte);
158
                }
159

            
160
                new_text.replace_range(start_byte..old_end_byte, &change.text);
161
            } else {
162
                // Full content replacement.
163
                new_text = change.text.clone();
164
                edited_tree = None;
165
                provisional_sourcemap = None;
166
            }
167
        }
168

            
169
        let mut new_tree: Option<Tree> = if let Some(ref old_tree) = edited_tree {
170
            let mut parser = tree_sitter::Parser::new();
171
            parser
172
                .set_language(&tree_sitter_essence::LANGUAGE.into())
173
                .unwrap();
174
            parser.parse(&new_text, Some(old_tree))
175
        } else {
176
            None
177
        };
178

            
179
        if new_tree.is_none() {
180
            new_tree = get_tree(&new_text).map(|(tree, _)| tree);
181
        }
182

            
183
        // store updated text/tree IMMEDIATELY so subsequent incremental edits are based on
184
        // the latest document state, then parse & diagnose in a debounced task
185
        let provisional = CacheCont {
186
            sourcemap: provisional_sourcemap,
187
            ast: cache_conts.ast.clone(),
188
            errors: cache_conts.errors.clone(),
189
            cst: new_tree.clone(),
190
            contents: new_text.clone(),
191
            version: incoming_version,
192
        };
193
        lsp_cache.insert(uri.clone(), provisional).await;
194

            
195
        let lsp_cache = lsp_cache.clone();
196
        let client = self.client.clone();
197
        tokio::spawn(async move {
198
            tokio::time::sleep(std::time::Duration::from_millis(PARSE_DEBOUNCE_MS)).await;
199

            
200
            let Some(current) = lsp_cache.get(&uri).await else {
201
                return;
202
            };
203

            
204
            // only parse the newest queued version
205
            if current.version != incoming_version {
206
                return;
207
            }
208

            
209
            let context = Arc::new(RwLock::new(Context::default()));
210
            let mut errors: Vec<RecoverableParseError> = Vec::new();
211
            let parsed = parse_essence_with_context_and_map(
212
                &current.contents,
213
                context,
214
                &mut errors,
215
                current.cst.as_ref(),
216
            );
217

            
218
            let parsed_cache = match parsed {
219
                Ok((Some(ast_model), source_map)) => CacheCont {
220
                    sourcemap: Some(source_map),
221
                    ast: Some(ast_model),
222
                    errors,
223
                    cst: current.cst.clone(),
224
                    contents: current.contents.clone(),
225
                    version: incoming_version,
226
                },
227
                Ok((None, source_map)) => CacheCont {
228
                    sourcemap: Some(source_map),
229
                    ast: None,
230
                    errors,
231
                    cst: current.cst.clone(),
232
                    contents: current.contents.clone(),
233
                    version: incoming_version,
234
                },
235
                Err(fatal) => CacheCont {
236
                    sourcemap: None,
237
                    ast: None,
238
                    errors: vec![RecoverableParseError::new(fatal.to_string(), None)],
239
                    cst: current.cst.clone(),
240
                    contents: current.contents.clone(),
241
                    version: incoming_version,
242
                },
243
            };
244

            
245
            if let Err(err) = client.semantic_tokens_refresh().await {
246
                client
247
                    .log_message(
248
                        MessageType::WARNING,
249
                        format!("semantic_tokens_refresh failed on change: {err}"),
250
                    )
251
                    .await;
252
            }
253

            
254
            if let Some(latest) = lsp_cache.get(&uri).await
255
                && latest.version != incoming_version
256
            {
257
                return;
258
            }
259
            lsp_cache.insert(uri.clone(), parsed_cache.clone()).await;
260

            
261
            publish_diagnostics(&client, &uri, parsed_cache).await;
262
        });
263
    }
264
}
265

            
266
async fn publish_diagnostics(client: &tower_lsp::Client, uri: &Url, cache_conts: CacheCont) {
267
    // Build diagnostics from the parse errors cached for this document.
268
    // parse_essence_with_context_and_map already produces both syntactic and semantic errors.
269
    let diagnostics: Vec<Diagnostic> = cache_conts
270
        .errors
271
        .into_iter()
272
        .map(|err| error_to_diagnostic(&err))
273
        .collect();
274

            
275
    let lsp_diagnostics = convert_diagnostics(diagnostics);
276

            
277
    client
278
        .publish_diagnostics(uri.clone(), lsp_diagnostics, None)
279
        .await;
280
}
281
// convert diagnostics from cp-essence-parser to LSP diagnostics
282
pub fn convert_diagnostics(diagnostics: Vec<ParserDiagnostic>) -> Vec<LspDiagnostic> {
283
    // map each ParserDiagnostic to LspDiagnostic
284
    diagnostics
285
        .into_iter()
286
        .map(|diag| {
287
            LspDiagnostic {
288
                range: parser_to_lsp_range(diag.range),
289
                severity: match diag.severity {
290
                    ParserSeverity::Error => Some(tower_lsp::lsp_types::DiagnosticSeverity::ERROR),
291
                    ParserSeverity::Warn => Some(tower_lsp::lsp_types::DiagnosticSeverity::WARNING),
292
                    ParserSeverity::Info => {
293
                        Some(tower_lsp::lsp_types::DiagnosticSeverity::INFORMATION)
294
                    }
295
                    ParserSeverity::Hint => Some(tower_lsp::lsp_types::DiagnosticSeverity::HINT),
296
                },
297
                code: None,             // for now
298
                code_description: None, // also for now
299
                source: Some(diag.source.to_string()),
300
                message: diag.message,
301
                related_information: None,
302
                tags: None,
303
                data: None,
304
            }
305
        })
306
        .collect()
307
}
308

            
309
// playing that position converts properly
310
pub fn parser_to_lsp_range(range: ParserRange) -> LspRange {
311
    LspRange {
312
        start: parser_to_lsp_position(range.start),
313
        end: parser_to_lsp_position(range.end),
314
    }
315
}
316

            
317
pub fn parser_to_lsp_position(position: ParserPosition) -> LspPosition {
318
    LspPosition {
319
        line: position.line,
320
        character: position.character,
321
    }
322
}
323

            
324
//need to convert from character and line to byte value in a file
325
pub fn position_to_byte(text: &str, position: Position) -> usize {
326
    let row = position.line as usize;
327
    let line_start = line_start_byte(text.as_bytes(), row);
328
    let line_end = text[line_start..]
329
        .find('\n')
330
        .map(|off| line_start + off)
331
        .unwrap_or(text.len());
332
    let line_text = &text[line_start..line_end];
333
    let col_bytes = utf16_col_to_byte(line_text, position.character as usize);
334
    line_start + col_bytes
335
}
336

            
337
//need to convert from character and line to row and line
338
//this allows for incremental editing of treesitter
339
fn position_to_treesitter_point(text: &str, position: Position) -> Point {
340
    let row = position.line as usize;
341
    let line_start = line_start_byte(text.as_bytes(), row);
342
    let absolute = position_to_byte(text, position);
343
    Point::new(row, absolute.saturating_sub(line_start))
344
}
345

            
346
fn calculate_new_end_position(inserted_text: &str, start: Point) -> Point {
347
    let bytes = inserted_text.as_bytes();
348
    let newline_count = bytes.iter().filter(|&&b| b == b'\n').count();
349

            
350
    if newline_count == 0 {
351
        return Point::new(start.row, start.column + bytes.len());
352
    }
353

            
354
    let last_newline = bytes.iter().rposition(|&b| b == b'\n').unwrap_or(0);
355
    let trailing_bytes = bytes.len().saturating_sub(last_newline + 1);
356
    Point::new(start.row + newline_count, trailing_bytes)
357
}
358

            
359
/// Byte offset at which 0-based line `row` starts. If `row` is past the last
/// newline, this clamps to the start of the final line.
fn line_start_byte(source: &[u8], row: usize) -> usize {
    let mut start = 0usize;
    for _ in 0..row {
        // Advance past the next newline; stop early if there are fewer lines
        // than requested.
        match source[start..].iter().position(|&b| b == b'\n') {
            Some(off) => start += off + 1,
            None => break,
        }
    }
    start
}
373

            
374
/// Translate a UTF-16 column (as LSP positions use) into a byte offset within
/// `line`. Columns landing inside a multi-unit character (e.g. in the middle
/// of a surrogate pair) clamp to that character's start; columns past the end
/// of the line clamp to `line.len()`.
fn utf16_col_to_byte(line: &str, utf16_col: usize) -> usize {
    let mut consumed = 0usize;
    for (idx, ch) in line.char_indices() {
        let width = ch.len_utf16();
        // Stop at this character if the target column lands on or inside it.
        if consumed >= utf16_col || consumed + width > utf16_col {
            return idx;
        }
        consumed += width;
    }
    line.len()
}
388

            
389
fn shift_sourcemap_after_edit(
390
    source_map: &mut SourceMap,
391
    start_byte: usize,
392
    old_end_byte: usize,
393
    new_end_byte: usize,
394
) {
395
    let delta = new_end_byte as isize - old_end_byte as isize;
396

            
397
    for span in &mut source_map.spans {
398
        if span.end_byte <= start_byte {
399
            continue;
400
        }
401

            
402
        if span.start_byte >= old_end_byte {
403
            span.start_byte = shift_byte(span.start_byte, delta);
404
            span.end_byte = shift_byte(span.end_byte, delta);
405
            continue;
406
        }
407

            
408
        // if the edited region intersects this span, invalidate it
409
        //  until the debounced full parse
410
        span.start_byte = 0;
411
        span.end_byte = 0;
412
        span.hover_info = None;
413
    }
414

            
415
    source_map.by_byte = Default::default();
416
    for (idx, span) in source_map.spans.iter().enumerate() {
417
        if span.start_byte < span.end_byte {
418
            source_map
419
                .by_byte
420
                .insert(span.start_byte..span.end_byte, idx as u32);
421
        }
422
    }
423
}
424

            
425
/// Helper to shift a byte position by a signed delta, saturating at the
/// bounds of `usize`. Uses `unsigned_abs` rather than `(-delta) as usize`,
/// which would overflow (panicking in debug builds) for `delta == isize::MIN`.
fn shift_byte(byte: usize, delta: isize) -> usize {
    if delta >= 0 {
        byte.saturating_add(delta as usize)
    } else {
        byte.saturating_sub(delta.unsigned_abs())
    }
}