chore(lsp): Add benchmark for performance on a large real-world repo (#23395)

This PR adds a benchmark intended to measure how the LSP handles larger
repos, as well as its performance on a more realistic workload.

The repo being benchmarked is
[deco-cx/apps](https://github.com/deco-cx/apps), which has been vendored
along with its dependencies. It's included as a git submodule since it's
fairly large. The LSP requests used in the benchmark are the actual
requests sent by VSCode as I opened, modified, and navigated around a
file (to simulate an actual user interaction).
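
For a sense of what the replayed traffic looks like: each entry in the captured fixture is a JSON-RPC message along the lines of the sketch below, built here with serde_json's `json!` macro. The method, id, URI, and position are made-up placeholders, not values taken from the actual capture.

```rust
use serde_json::json;

fn main() {
  // Hypothetical example of one recorded LSP request (values are
  // illustrative, not from the real deco_apps_requests.json capture).
  let hover_request = json!({
    "jsonrpc": "2.0",
    "id": 42,
    "method": "textDocument/hover",
    "params": {
      "textDocument": { "uri": "file:///compose/mod.ts" },
      "position": { "line": 10, "character": 4 }
    }
  });
  println!("{hover_request}");
}
```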

The main motivation is to have a more realistic benchmark that measures
how we do with a large number of files and dependencies. The changes
made from 1.42 to 1.42.3 mostly improved performance on larger repos,
so none of our existing benchmarks captured the improvement.

Here are the results for the changes made from 1.42 to 1.42.3 (the new
benchmark is the last one listed):

**1.42.0**

```text
Starting Deno benchmark
-> Start benchmarking lsp
   - Simple Startup/Shutdown 
      (10 runs, mean: 379ms)
   - Big Document/Several Edits 
      (5 runs, mean: 1142ms)
   - Find/Replace
      (10 runs, mean: 51ms)
   - Code Lens
      (10 runs, mean: 443ms)
   - deco-cx/apps Multiple Edits + Navigation
      (5 runs, mean: 25121ms)
<- End benchmarking lsp
```

**1.42.3**

```text
Starting Deno benchmark
-> Start benchmarking lsp
   - Simple Startup/Shutdown 
      (10 runs, mean: 383ms)
   - Big Document/Several Edits 
      (5 runs, mean: 1135ms)
   - Find/Replace
      (10 runs, mean: 55ms)
   - Code Lens
      (10 runs, mean: 440ms)
   - deco-cx/apps Multiple Edits + Navigation
      (5 runs, mean: 11675ms)
<- End benchmarking lsp
```
Nathan Whitaker, 2024-04-16 12:26:51 -07:00, committed by GitHub
commit 422cff1f24 (parent c4d0fceec3)
7 changed files with 3054 additions and 1 deletion


@@ -19,6 +19,7 @@
".git",
"cli/bench/testdata/express-router.js",
"cli/bench/testdata/npm/",
"cli/bench/testdata/lsp_benchdata/",
"cli/tsc/dts/lib.d.ts",
"cli/tsc/dts/lib.scripthost.d.ts",
"cli/tsc/dts/lib.decorators*.d.ts",

.gitmodules (vendored): 3 additions

@@ -9,3 +9,6 @@
[submodule "tests/node_compat/runner/suite"]
path = tests/node_compat/runner/suite
url = https://github.com/denoland/node_test.git
[submodule "cli/bench/testdata/lsp_benchdata"]
path = cli/bench/testdata/lsp_benchdata
url = https://github.com/denoland/deno_lsp_benchdata.git


@@ -9,11 +9,14 @@ use std::collections::HashMap;
use std::path::Path;
use std::time::Duration;
use test_util::lsp::LspClientBuilder;
use test_util::PathRef;
use tower_lsp::lsp_types as lsp;

static FIXTURE_CODE_LENS_TS: &str = include_str!("testdata/code_lens.ts");
static FIXTURE_DB_TS: &str = include_str!("testdata/db.ts");
static FIXTURE_DB_MESSAGES: &[u8] = include_bytes!("testdata/db_messages.json");
static FIXTURE_DECO_APPS: &[u8] =
  include_bytes!("testdata/deco_apps_requests.json");

#[derive(Debug, Deserialize)]
enum FixtureType {
@@ -36,6 +39,107 @@ struct FixtureMessage {
  params: Value,
}

/// replaces the root directory in the URIs of the requests
/// with the given root path
fn patch_uris<'a>(
  reqs: impl IntoIterator<Item = &'a mut tower_lsp::jsonrpc::Request>,
  root: &PathRef,
) {
  for req in reqs {
    let mut params = req.params().unwrap().clone();
    let new_req = if let Some(doc) = params.get_mut("textDocument") {
      if let Some(uri_val) = doc.get_mut("uri") {
        let uri = uri_val.as_str().unwrap();
        *uri_val =
          Value::from(uri.replace(
            "file:///",
            &format!("file://{}/", root.to_string_lossy()),
          ));
      }
      let builder = tower_lsp::jsonrpc::Request::build(req.method().to_owned());
      let builder = if let Some(id) = req.id() {
        builder.id(id.clone())
      } else {
        builder
      };
      Some(builder.params(params).finish())
    } else {
      None
    };
    if let Some(new_req) = new_req {
      *req = new_req;
    }
  }
}

fn bench_deco_apps_edits(deno_exe: &Path) -> Duration {
  let mut requests: Vec<tower_lsp::jsonrpc::Request> =
    serde_json::from_slice(FIXTURE_DECO_APPS).unwrap();
  let apps =
    test_util::root_path().join("cli/bench/testdata/lsp_benchdata/apps");

  // it's a bit wasteful to do this for every run, but it's the easiest with
  // the way things are currently structured
  patch_uris(&mut requests, &apps);

  let mut client = LspClientBuilder::new()
    .use_diagnostic_sync(false)
    .set_root_dir(apps.clone())
    .deno_exe(deno_exe)
    .build();
  client.initialize(|c| {
    c.set_workspace_folders(vec![lsp_types::WorkspaceFolder {
      uri: Url::from_file_path(&apps).unwrap(),
      name: "apps".to_string(),
    }]);
    c.set_deno_enable(true);
    c.set_unstable(true);
    c.set_preload_limit(1000);
    c.set_config(apps.join("deno.json").as_path().to_string_lossy());
  });

  let start = std::time::Instant::now();

  let mut reqs = 0;
  for req in requests {
    if req.id().is_none() {
      client.write_notification(req.method(), req.params());
    } else {
      reqs += 1;
      client.write_jsonrpc(req.method(), req.params());
    }
  }
  for _ in 0..reqs {
    let _ = client.read_latest_response();
  }

  let end = start.elapsed();

  // part of the motivation of including this benchmark is to see how we
  // perform with a fairly large number of documents in memory.
  // make sure that's the case
  let res = client.write_request(
    "deno/virtualTextDocument",
    json!({
      "textDocument": {
        "uri": "deno:/status.md"
      }
    }),
  );

  let re = lazy_regex::regex!(r"Documents in memory: (\d+)");
  let res = res.as_str().unwrap().to_string();
  assert!(res.starts_with("# Deno Language Server Status"));
  let captures = re.captures(&res).unwrap();
  let count = captures.get(1).unwrap().as_str().parse::<usize>().unwrap();
  assert!(count > 1000, "count: {}", count);

  client.shutdown();

  end
}

/// A benchmark that opens a 8000+ line TypeScript document, adds a function to
/// the end of the document and does a level of hovering and gets quick fix
/// code actions.
@@ -309,6 +413,15 @@ pub fn benchmarks(deno_exe: &Path) -> HashMap<String, i64> {
  println!("      ({} runs, mean: {}ms)", times.len(), mean);
  exec_times.insert("code_lens".to_string(), mean);

  println!("   - deco-cx/apps Multiple Edits + Navigation");
  let mut times = Vec::new();
  for _ in 0..5 {
    times.push(bench_deco_apps_edits(deno_exe));
  }
  let mean =
    (times.iter().sum::<Duration>() / times.len() as u32).as_millis() as i64;
  println!("      ({} runs, mean: {}ms)", times.len(), mean);

  println!("<- End benchmarking lsp");

  exec_times

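The `patch_uris` helper above rewrites each recorded URI so it points at wherever the lsp_benchdata submodule happens to be checked out. The sketch below shows the same string replacement in isolation, with a placeholder root path standing in for `test_util::root_path()` joined with `cli/bench/testdata/lsp_benchdata/apps`.

```rust
fn main() {
  // Minimal sketch of the URI rewrite applied to each captured request.
  // The root path is a made-up placeholder, not a real checkout location.
  let root = "/home/user/deno/cli/bench/testdata/lsp_benchdata/apps";
  let recorded = "file:///compose/mod.ts";
  let patched = recorded.replace("file:///", &format!("file://{}/", root));
  assert_eq!(
    patched,
    "file:///home/user/deno/cli/bench/testdata/lsp_benchdata/apps/compose/mod.ts"
  );
}
```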

@@ -431,7 +431,11 @@ async fn main() -> Result<()> {
  println!("Starting Deno benchmark");
  let target_dir = test_util::target_dir();
  let deno_exe = test_util::deno_exe_path().to_path_buf();
  let deno_exe = if let Ok(p) = std::env::var("DENO_BENCH_EXE") {
    PathBuf::from(p)
  } else {
    test_util::deno_exe_path().to_path_buf()
  };

  env::set_current_dir(test_util::root_path())?;

  let mut new_data = BenchResult {

cli/bench/testdata/deco_apps_requests.json (vendored, new file): 2905 additions

File diff suppressed because it is too large

cli/bench/testdata/lsp_benchdata (vendored submodule): 1 addition

@@ -0,0 +1 @@
Subproject commit af4c6a1eee825f19d3b3cce74cfdd03ebe1a3b92


@@ -941,6 +941,21 @@ impl LspClient {
    })
  }

  pub fn write_jsonrpc(
    &mut self,
    method: impl AsRef<str>,
    params: impl Serialize,
  ) {
    let value = json!({
      "jsonrpc": "2.0",
      "id": self.request_id,
      "method": method.as_ref(),
      "params": params,
    });
    self.write(value);
    self.request_id += 1;
  }

  fn write(&mut self, value: Value) {
    let value_str = value.to_string();
    let msg = format!(
@@ -1030,6 +1045,17 @@ impl LspClient {
    })
  }

  pub fn read_latest_response(
    &mut self,
  ) -> (u64, Option<Value>, Option<LspResponseError>) {
    self.reader.read_message(|msg| match msg {
      LspMessage::Response(id, val, err) => {
        Some((*id, val.clone(), err.clone()))
      }
      _ => None,
    })
  }

  pub fn write_response<V>(&mut self, id: u64, result: V)
  where
    V: Serialize,