better merlin conditionals
dfgordon committed Aug 24, 2024
1 parent 8389c72 commit 89426d8
Showing 32 changed files with 810 additions and 399 deletions.
8 changes: 8 additions & 0 deletions CHANGELOG.md
@@ -5,6 +5,14 @@ All notable changes to this project will be documented in this file.
The format is based on [Keep a Changelog](https://keepachangelog.com/en/1.1.0/),
and this project adheres to [Semantic Versioning](https://semver.org/spec/v2.0.0.html).

## [3.0.2] - 2024-08-24

### Fixes

* Better handling of Merlin conditionals
* Disassembler identifies out of bounds branches as data
* Automatic unpacking uses both file system hints and actual data

## [3.0.1] - 2024-08-18

### Fixes
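
The changelog entry on branches is terse, so here is the idea in miniature: a 6502 relative branch whose computed target falls outside the address range being disassembled is unlikely to be real code, and the disassembler can emit those bytes as data instead. A minimal sketch of that bounds test follows; the helper name and signature are invented for illustration, not a2kit's actual API.

```rust
/// Hypothetical helper: does a 6502 relative branch at `pc` land inside the
/// range being disassembled? The operand is a signed byte applied to the
/// address of the *next* instruction (pc + 2).
fn branch_target_in_bounds(pc: u16, offset: u8, start: u16, end: u16) -> bool {
    let next = pc.wrapping_add(2);
    let target = next.wrapping_add(offset as i8 as i16 as u16);
    target >= start && target < end
}

fn main() {
    // BNE +$10 at $0300 while disassembling $0300..$0320 lands at $0312: treat as code.
    assert!(branch_target_in_bounds(0x0300, 0x10, 0x0300, 0x0320));
    // BNE +$7F from the same spot lands at $0381, outside the range: treat as data.
    assert!(!branch_target_in_bounds(0x0300, 0x7F, 0x0300, 0x0320));
    println!("bounds checks passed");
}
```
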
2 changes: 1 addition & 1 deletion Cargo.toml
@@ -1,6 +1,6 @@
[package]
name = "a2kit"
version = "3.0.1"
version = "3.0.2"
edition = "2021"
readme = "README.md"
license = "MIT"
24 changes: 13 additions & 11 deletions src/bin/server-applesoft/main.rs
@@ -230,7 +230,7 @@ fn main() -> Result<(), Box<dyn Error + Sync + Send>> {
}

// Main loop
while let Ok(msg) = connection.receiver.recv() {
loop {

// Gather data from analysis threads
if let Some(oldest) = tools.thread_handles.front() {
@@ -248,17 +248,19 @@ fn main() -> Result<(), Box<dyn Error + Sync + Send>> {
}

// Handle messages from the client
match msg {
lsp_server::Message::Notification(note) => {
notification::handle_notification(&connection,note,&mut tools);
}
lsp_server::Message::Request(req) => {
if request::handle_request(&connection, req, &mut tools) {
break;
if let Ok(msg) = connection.receiver.recv_timeout(std::time::Duration::from_millis(100)) {
match msg {
lsp_server::Message::Notification(note) => {
notification::handle_notification(&connection,note,&mut tools);
}
lsp_server::Message::Request(req) => {
if request::handle_request(&connection, req, &mut tools) {
break;
}
},
lsp_server::Message::Response(resp) => {
response::handle_response(&connection, resp, &mut tools);
}
},
lsp_server::Message::Response(resp) => {
response::handle_response(&connection, resp, &mut tools);
}
}
}
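
The substantive change in this main loop (repeated in the Integer BASIC and Merlin servers below) is swapping a `recv()` that blocks until the client says something for a `recv_timeout` of 100 ms, so that every pass of the loop can also harvest results from the analysis threads. A standalone sketch of the pattern, assuming nothing about `lsp_server` beyond what the diff shows, using std's mpsc channel whose `recv_timeout` behaves the same way:

```rust
use std::sync::mpsc;
use std::thread;
use std::time::Duration;

fn do_housekeeping() {
    // Placeholder for the work each pass does regardless of client traffic,
    // e.g. joining finished analysis threads and publishing their diagnostics.
}

fn main() {
    let (tx, rx) = mpsc::channel::<String>();

    // Simulate a client that stays quiet for a while, then asks us to stop.
    thread::spawn(move || {
        thread::sleep(Duration::from_millis(250));
        tx.send("shutdown".to_string()).ok();
    });

    loop {
        // Housekeeping now runs on every pass, even while the client is idle.
        do_housekeeping();

        // Poll for a client message with a 100 ms budget instead of blocking forever.
        match rx.recv_timeout(Duration::from_millis(100)) {
            Ok(msg) if msg == "shutdown" => break,
            Ok(msg) => println!("handling {msg}"),
            Err(mpsc::RecvTimeoutError::Timeout) => continue,
            Err(mpsc::RecvTimeoutError::Disconnected) => break,
        }
    }
}
```

The servers themselves use `if let Ok(msg) = ...`, which simply skips the pass on a timeout or a disconnect; the sketch spells the two error cases out for clarity.
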
24 changes: 13 additions & 11 deletions src/bin/server-integerbasic/main.rs
@@ -242,7 +242,7 @@ fn main() -> Result<(), Box<dyn Error + Sync + Send>> {
}

// Main loop
while let Ok(msg) = connection.receiver.recv() {
loop {

// Gather data from analysis threads
if let Some(oldest) = tools.thread_handles.front() {
@@ -260,17 +260,19 @@ fn main() -> Result<(), Box<dyn Error + Sync + Send>> {
}

// Handle messages from the client
match msg {
lsp_server::Message::Notification(note) => {
notification::handle_notification(&connection,note,&mut tools);
}
lsp_server::Message::Request(req) => {
if request::handle_request(&connection, req, &mut tools) {
break;
if let Ok(msg) = connection.receiver.recv_timeout(std::time::Duration::from_millis(100)) {
match msg {
lsp_server::Message::Notification(note) => {
notification::handle_notification(&connection,note,&mut tools);
}
lsp_server::Message::Request(req) => {
if request::handle_request(&connection, req, &mut tools) {
break;
}
},
lsp_server::Message::Response(resp) => {
response::handle_response(&connection, resp, &mut tools);
}
},
lsp_server::Message::Response(resp) => {
response::handle_response(&connection, resp, &mut tools);
}
}
}
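
For context on why the receive must not block: each document update spawns an analysis pass and pushes its `JoinHandle` onto a `VecDeque` (`tools.thread_handles`), and the head of that queue is inspected on every pass of the loop above. A self-contained sketch of that queue discipline; the completion check and the simplified `AnalysisResult` here are assumptions, not the servers' actual types.

```rust
use std::collections::VecDeque;
use std::thread::{self, JoinHandle};
use std::time::Duration;

// Simplified stand-in for the servers' AnalysisResult.
struct AnalysisResult {
    uri: String,
    diagnostics: usize,
}

fn main() {
    let mut handles: VecDeque<JoinHandle<Option<AnalysisResult>>> = VecDeque::new();

    // Pretend two edits arrived and each spawned an analysis pass.
    for n in 0..2u64 {
        handles.push_back(thread::spawn(move || {
            thread::sleep(Duration::from_millis(50 * (n + 1)));
            Some(AnalysisResult { uri: format!("file:///doc{n}.s"), diagnostics: n as usize })
        }));
    }

    // Main-loop housekeeping: only the oldest handle is examined, and it is only
    // joined once finished, so no pass of the loop ever blocks on an analysis thread.
    while !handles.is_empty() {
        let oldest_done = handles.front().map(|h| h.is_finished()).unwrap_or(false);
        if oldest_done {
            if let Some(result) = handles.pop_front().unwrap().join().ok().flatten() {
                println!("publish {} diagnostic(s) for {}", result.diagnostics, result.uri);
            }
        } else {
            // In the real servers this idle slot is where recv_timeout lets the client speak.
            thread::sleep(Duration::from_millis(10));
        }
    }
}
```
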
94 changes: 69 additions & 25 deletions src/bin/server-merlin/main.rs
@@ -15,9 +15,11 @@ use std::collections::HashMap;
use std::collections::VecDeque;
use std::error::Error;
use std::sync::{Arc,Mutex};
use a2kit::lang::server::Analysis;
use a2kit::lang::server::{Analysis,Checkpoint};
use a2kit::lang::server::TOKEN_TYPES; // used if we register tokens on server side
use a2kit::lang::merlin;
use a2kit::lang::merlin::diagnostics::Analyzer;
use a2kit::lang::merlin::checkpoint::CheckpointManager;
use a2kit::lang::disk_server::DiskServer;

mod notification;
@@ -52,7 +54,8 @@ struct AnalysisResult {
diagnostics: Vec<lsp::Diagnostic>,
folding: Vec<lsp::FoldingRange>,
symbols: merlin::Symbols,
workspace: merlin::Workspace
workspace: merlin::Workspace,
forced: bool
}

/// Send log messages to the client.
@@ -108,6 +111,18 @@ fn request_configuration(connection: &lsp_server::Connection) -> Result<(),Box<d
}
}

fn refresh_semantic_highlights(connection: &lsp_server::Connection) -> Result<(),Box<dyn Error>> {
let req = lsp_server::Request::new::<Option<usize>>(
lsp_server::RequestId::from("merlin6502-refresh-tokens".to_string()),
lsp::request::SemanticTokensRefresh::METHOD.to_string(),
None
);
match connection.sender.send(req.into()) {
Ok(()) => Ok(()),
Err(e) => Err(Box::new(e))
}
}

/// parse the response to the configuration request
fn parse_configuration(resp: lsp_server::Response) -> Result<merlin::settings::Settings,Box<dyn Error>> {
if let Some(result) = resp.result {
@@ -125,29 +140,42 @@ fn parse_configuration(resp: lsp_server::Response) -> Result<merlin::settings::S
Err(Box::new(ServerError::Parsing))
}

fn launch_analysis_thread(analyzer: Arc<Mutex<Analyzer>>, doc: a2kit::lang::Document, ws_scan: WorkspaceScanMethod) -> std::thread::JoinHandle<Option<AnalysisResult>> {
fn launch_analysis_thread(analyzer: Arc<Mutex<Analyzer>>, doc: a2kit::lang::Document, ws_scan: WorkspaceScanMethod, chks: &HashMap<String,CheckpointManager>) -> std::thread::JoinHandle<Option<AnalysisResult>> {
let checkpoints = match ws_scan {
WorkspaceScanMethod::FullUpdate => {
let mut ans = Vec::new();
for chk in chks.values() {
ans.push(chk.get_doc());
}
ans
},
_ => Vec::new()
};
std::thread::spawn( move || {
match analyzer.lock() {
Ok(mut analyzer) => {
let maybe_gather = match ws_scan {
WorkspaceScanMethod::UseCheckpoints => Some(false),
WorkspaceScanMethod::FullUpdate => Some(true),
_ => None
};
if let Some(gather) = maybe_gather {
match analyzer.rescan_workspace(gather) {
Ok(()) => {},
Err(_) => {}
let forced = match ws_scan {
WorkspaceScanMethod::None => false,
WorkspaceScanMethod::UseCheckpoints => {
match analyzer.rescan_workspace(false) {
_ => false
}
},
WorkspaceScanMethod::FullUpdate => {
match analyzer.rescan_workspace_and_update(checkpoints) {
_ => true
}
}
}
};
match analyzer.analyze(&doc) {
Ok(()) => Some(AnalysisResult {
uri: doc.uri.clone(),
version: doc.version,
diagnostics: analyzer.get_diags(&doc),
folding: analyzer.get_folds(&doc),
symbols: analyzer.get_symbols(),
workspace: analyzer.get_workspace().clone()
workspace: analyzer.get_workspace().clone(),
forced
}),
Err(_) => None
}
@@ -287,6 +315,17 @@ fn main() -> Result<(), Box<dyn Error + Sync + Send>> {
more_trigger_character: Some(vec![";".to_string()])
}),
folding_range_provider: Some(lsp::FoldingRangeProviderCapability::Simple(true)),
semantic_tokens_provider: Some(lsp::SemanticTokensServerCapabilities::SemanticTokensOptions(lsp::SemanticTokensOptions {
work_done_progress_options: lsp::WorkDoneProgressOptions {
work_done_progress: None
},
legend: lsp::SemanticTokensLegend {
token_types: TOKEN_TYPES.iter().map(|x| lsp::SemanticTokenType::new(x)).collect(),
token_modifiers: vec![]
},
range: None,
full: Some(lsp::SemanticTokensFullOptions::Bool(true))
})),
..lsp::ServerCapabilities::default()
},
server_info: Some(lsp::ServerInfo {
@@ -337,7 +376,7 @@ fn main() -> Result<(), Box<dyn Error + Sync + Send>> {
}

// Main loop
while let Ok(msg) = connection.receiver.recv() {
loop {

// Gather data from analysis threads
if let Some(oldest) = tools.thread_handles.front() {
@@ -357,22 +396,27 @@ fn main() -> Result<(), Box<dyn Error + Sync + Send>> {
tools.assembler.use_shared_symbols(chkpt.shared_symbols());
}
push_diagnostics(&connection, result.uri, result.version, result.diagnostics);
if result.forced {
refresh_semantic_highlights(&connection).expect("refresh request failed");
}
}
}
}

// Handle messages from the client
match msg {
lsp_server::Message::Notification(note) => {
notification::handle_notification(&connection,note,&mut tools);
}
lsp_server::Message::Request(req) => {
if request::handle_request(&connection, req, &mut tools) {
break;
if let Ok(msg) = connection.receiver.recv_timeout(std::time::Duration::from_millis(100)) {
match msg {
lsp_server::Message::Notification(note) => {
notification::handle_notification(&connection,note,&mut tools);
}
lsp_server::Message::Request(req) => {
if request::handle_request(&connection, req, &mut tools) {
break;
}
},
lsp_server::Message::Response(resp) => {
response::handle_response(&connection, resp, &mut tools);
}
},
lsp_server::Message::Response(resp) => {
response::handle_response(&connection, resp, &mut tools);
}
}
}
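
The Merlin server's `launch_analysis_thread` now receives the map of `CheckpointManager`s and, for a full update, clones each checkpointed document out of it before spawning, since the spawned closure must be `'static` and cannot borrow a map owned by the main loop; the collected documents are then handed to `rescan_workspace_and_update`, and the `forced` flag returned in `AnalysisResult` is what triggers the `SemanticTokensRefresh` request. A reduced sketch of just the clone-before-spawn step, with simplified stand-ins for a2kit's `Document` and `CheckpointManager`:

```rust
use std::collections::HashMap;
use std::thread;

// Simplified stand-ins for a2kit's Document and CheckpointManager.
#[derive(Clone)]
struct Document { text: String }
struct CheckpointManager { latest: Document }
impl CheckpointManager {
    fn get_doc(&self) -> Document { self.latest.clone() }
}

fn launch(chks: &HashMap<String, CheckpointManager>) -> thread::JoinHandle<usize> {
    // Clone the checkpointed documents *before* spawning: the closure must be
    // 'static, so it cannot hold a borrow of the map owned by the main loop.
    let checkpoints: Vec<Document> = chks.values().map(|c| c.get_doc()).collect();
    thread::spawn(move || {
        // The real thread would hand `checkpoints` to rescan_workspace_and_update.
        checkpoints.iter().map(|d| d.text.len()).sum()
    })
}

fn main() {
    let mut chks = HashMap::new();
    chks.insert(
        "file:///main.s".to_string(),
        CheckpointManager { latest: Document { text: " lda #$00".to_string() } },
    );
    let total = launch(&chks).join().unwrap();
    println!("rescanned {total} bytes; the map still holds {} document(s)", chks.len());
}
```

This is also why `rescan_workspace_and_update(checkpoints)` in the diff takes the documents by value rather than by reference.
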
9 changes: 6 additions & 3 deletions src/bin/server-merlin/notification.rs
@@ -36,7 +36,8 @@ pub fn handle_notification(
version: Some(params.text_document.version),
text: params.text_document.text
},
crate::WorkspaceScanMethod::FullUpdate
crate::WorkspaceScanMethod::FullUpdate,
&tools.doc_chkpts
);
tools.thread_handles.push_back(handle);
}
@@ -52,7 +53,8 @@ pub fn handle_notification(
version: None,
text
},
crate::WorkspaceScanMethod::FullUpdate
crate::WorkspaceScanMethod::FullUpdate,
&tools.doc_chkpts
);
tools.thread_handles.push_back(handle);
}
@@ -83,7 +85,8 @@ pub fn handle_notification(
version: Some(params.text_document.version),
text: change.text
},
crate::WorkspaceScanMethod::UseCheckpoints
crate::WorkspaceScanMethod::UseCheckpoints,
&tools.doc_chkpts
);
tools.thread_handles.push_back(handle);
}
57 changes: 31 additions & 26 deletions src/bin/server-merlin/request.rs
@@ -6,7 +6,8 @@ use lsp_server::{Connection,RequestId,Response};
use serde_json;
use std::collections::HashMap;
use std::sync::Arc;
use a2kit::lang::server::{Checkpoint, Tokens};
use a2kit::lang::server::Checkpoint;
// use a2kit::lang::server::Tokens; // used if we register tokens on client side
use a2kit::lang::{disk_server, merlin, normalize_client_uri, normalize_client_uri_str};
use a2kit::lang::merlin::formatter;
use a2kit::lang::merlin::disassembly::DasmRange;
@@ -41,6 +42,7 @@ pub fn handle_request(
lsp::request::HoverRequest::METHOD => Checkpoint::hover_response(chkpts, &mut tools.hover_provider, req.clone(), &mut resp),
lsp::request::Completion::METHOD => Checkpoint::completion_response(chkpts, &mut tools.completion_provider, req.clone(), &mut resp),
lsp::request::FoldingRangeRequest::METHOD => Checkpoint::folding_range_response(chkpts, req.clone(), &mut resp),
lsp::request::SemanticTokensFullRequest::METHOD => Checkpoint::sem_tok_response(chkpts, &mut tools.highlighter, req.clone(), &mut resp),

lsp::request::Shutdown::METHOD => {
logger(&connection,"shutdown request");
@@ -101,7 +103,8 @@ pub fn handle_request(
let handle = launch_analysis_thread(
Arc::clone(&tools.analyzer),
chk.get_doc(),
crate::WorkspaceScanMethod::FullUpdate
crate::WorkspaceScanMethod::FullUpdate,
&tools.doc_chkpts
);
tools.thread_handles.push_back(handle);
}
@@ -118,7 +121,8 @@ pub fn handle_request(
let handle = launch_analysis_thread(
Arc::clone(&tools.analyzer),
chk.get_doc(),
crate::WorkspaceScanMethod::FullUpdate
crate::WorkspaceScanMethod::FullUpdate,
&tools.doc_chkpts
);
tools.thread_handles.push_back(handle);
}
@@ -135,36 +139,37 @@ pub fn handle_request(
let handle = launch_analysis_thread(
Arc::clone(&tools.analyzer),
chk.get_doc(),
crate::WorkspaceScanMethod::UseCheckpoints
crate::WorkspaceScanMethod::UseCheckpoints,
&tools.doc_chkpts
);
tools.thread_handles.push_back(handle);
}
resp = lsp_server::Response::new_ok(req.id,serde_json::Value::Null);
}
}
},
"merlin6502.semantic.tokens" => {
if params.arguments.len()==2 {
let prog_res = serde_json::from_value::<String>(params.arguments[0].clone());
let uri_res = serde_json::from_value::<String>(params.arguments[1].clone());
if let (Ok(program),Ok(uri)) = (prog_res,uri_res) {
let normalized_uri = normalize_client_uri_str(&uri).expect("could not parse URI");
if let Some(chk) = tools.doc_chkpts.get(&normalized_uri.to_string()) {
tools.highlighter.use_shared_symbols(chk.shared_symbols());
} else {
// need to clear symbols if there is no checkpoint
tools.highlighter.use_shared_symbols(Arc::new(a2kit::lang::merlin::Symbols::new()));
}
// decision here is to highlight even if no symbols found
resp = match tools.highlighter.get(&program) {
Ok(result) => {
lsp_server::Response::new_ok(req.id,result)
},
Err(_) => lsp_server::Response::new_err(req.id,PARSE_ERROR,"semantic tokens failed".to_string())
};
}
}
},
// "merlin6502.semantic.tokens" => {
// if params.arguments.len()==2 {
// let prog_res = serde_json::from_value::<String>(params.arguments[0].clone());
// let uri_res = serde_json::from_value::<String>(params.arguments[1].clone());
// if let (Ok(program),Ok(uri)) = (prog_res,uri_res) {
// let normalized_uri = normalize_client_uri_str(&uri).expect("could not parse URI");
// if let Some(chk) = tools.doc_chkpts.get(&normalized_uri.to_string()) {
// tools.highlighter.use_shared_symbols(chk.shared_symbols());
// } else {
// // need to clear symbols if there is no checkpoint
// tools.highlighter.use_shared_symbols(Arc::new(a2kit::lang::merlin::Symbols::new()));
// }
// // decision here is to highlight even if no symbols found
// resp = match tools.highlighter.get(&program) {
// Ok(result) => {
// lsp_server::Response::new_ok(req.id,result)
// },
// Err(_) => lsp_server::Response::new_err(req.id,PARSE_ERROR,"semantic tokens failed".to_string())
// };
// }
// }
// },
"merlin6502.pasteFormat" => {
if params.arguments.len()==2 {
let prog_res = serde_json::from_value::<String>(params.arguments[0].clone());
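
With the capability registered in main.rs and `SemanticTokensFullRequest` dispatched through the checkpoint above, the custom `merlin6502.semantic.tokens` command becomes redundant, which is why it is commented out here. For reference, the payload the standard request carries is a flat integer array, five values per token: delta line, delta start column, length, index into the `TOKEN_TYPES` legend, and a modifier bitset. A minimal sketch of that encoding, not a2kit's highlighter; the sample token kinds are made up.

```rust
// Minimal sketch of the LSP semantic-token wire encoding: each token becomes
// five u32s expressed relative to the previous token.
struct Tok { line: u32, start: u32, len: u32, kind: u32 }

fn encode(tokens: &[Tok]) -> Vec<u32> {
    let mut data = Vec::with_capacity(tokens.len() * 5);
    let (mut prev_line, mut prev_start) = (0u32, 0u32);
    for t in tokens {
        let delta_line = t.line - prev_line;
        // The start column is relative to the previous token only on the same line.
        let delta_start = if delta_line == 0 { t.start - prev_start } else { t.start };
        data.extend_from_slice(&[delta_line, delta_start, t.len, t.kind, 0 /* no modifiers */]);
        prev_line = t.line;
        prev_start = t.start;
    }
    data
}

fn main() {
    // Two tokens: one at (0,0) of length 4 with legend index 2, one at (1,9) of length 3 with index 0.
    let toks = [Tok { line: 0, start: 0, len: 4, kind: 2 }, Tok { line: 1, start: 9, len: 3, kind: 0 }];
    assert_eq!(encode(&toks), vec![0, 0, 4, 2, 0, 1, 9, 3, 0, 0]);
    println!("{:?}", encode(&toks));
}
```
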