Merge branch 'main' into feat/geometry
lucacasonato committed Jan 31, 2024
2 parents da975e3 + 1b83b84 · commit 3343fa4
Showing 62 changed files with 668 additions and 613 deletions.
270 changes: 117 additions & 153 deletions .github/workflows/ci.generate.ts

Large diffs are not rendered by default.

188 changes: 89 additions & 99 deletions .github/workflows/ci.yml

Large diffs are not rendered by default.

5 changes: 3 additions & 2 deletions bench_util/README.md
@@ -3,9 +3,10 @@
Example:

```rust
use deno_bench_util::bench_js_sync;
use deno_bench_util::bench_or_profile;
use deno_bench_util::bencher::{benchmark_group, Bencher};
use deno_bench_util::bench_js_sync};
use deno_bench_util::bencher::benchmark_group;
use deno_bench_util::bencher::Bencher;

use deno_core::Extension;

33 changes: 28 additions & 5 deletions cli/factory.rs
@@ -293,15 +293,38 @@ impl CliFactory {
}

pub fn maybe_lockfile(&self) -> &Option<Arc<Mutex<Lockfile>>> {
fn check_no_npm(lockfile: &Mutex<Lockfile>, options: &CliOptions) -> bool {
if options.no_npm() {
return true;
}
// Deno doesn't yet understand npm workspaces and the package.json resolution
// may be in a different folder than the deno.json/lockfile. So for now, ignore
// any package.jsons that are in different folders
options
.maybe_package_json()
.as_ref()
.map(|package_json| {
package_json.path.parent() != lockfile.lock().filename.parent()
})
.unwrap_or(false)
}

self.services.lockfile.get_or_init(|| {
let maybe_lockfile = self.options.maybe_lockfile();

// initialize the lockfile with the workspace's configuration
if let Some(lockfile) = &maybe_lockfile {
let package_json_deps = self
.package_json_deps_provider()
.reqs()
.map(|reqs| reqs.into_iter().map(|s| format!("npm:{}", s)).collect())
let no_npm = check_no_npm(lockfile, &self.options);
let package_json_deps = (!no_npm)
.then(|| {
self
.package_json_deps_provider()
.reqs()
.map(|reqs| {
reqs.into_iter().map(|s| format!("npm:{}", s)).collect()
})
.unwrap_or_default()
})
.unwrap_or_default();
let mut lockfile = lockfile.lock();
let config = match self.options.maybe_workspace_config() {
@@ -352,7 +375,7 @@ impl CliFactory {
};
lockfile.set_workspace_config(
deno_lockfile::SetWorkspaceConfigOptions {
no_npm: self.options.no_npm(),
no_npm,
no_config: self.options.no_config(),
config,
nv_to_jsr_url: |nv| {
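For context on the new `check_no_npm` helper above, here is a minimal standalone sketch of the directory comparison it performs, using plain `std::path` values instead of the real `CliOptions` and `Lockfile` types; the paths are hypothetical examples only.

```rust
use std::path::Path;

/// Sketch of the folder check: npm deps from a package.json are ignored
/// when that package.json lives in a different folder than the lockfile,
/// since Deno does not yet understand npm workspaces.
fn package_json_in_other_folder(package_json: &Path, lockfile: &Path) -> bool {
  package_json.parent() != lockfile.parent()
}

fn main() {
  // Same folder: package.json deps are written into the lockfile's
  // workspace config as "npm:<req>" entries.
  assert!(!package_json_in_other_folder(
    Path::new("/app/package.json"),
    Path::new("/app/deno.lock"),
  ));
  // Different folder: the deps are skipped and `no_npm` is set instead.
  assert!(package_json_in_other_folder(
    Path::new("/app/packages/foo/package.json"),
    Path::new("/app/deno.lock"),
  ));
}
```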
163 changes: 61 additions & 102 deletions cli/lsp/language_server.rs
@@ -284,7 +284,8 @@ impl LanguageServer {
/// in the Deno cache, including any of their dependencies.
pub async fn cache_request(
&self,
params: Option<Value>,
specifiers: Vec<ModuleSpecifier>,
referrer: ModuleSpecifier,
) -> LspResult<Option<Value>> {
async fn create_graph_for_caching(
cli_options: CliOptions,
@@ -330,50 +331,44 @@ impl LanguageServer {
Ok(())
}

match params.map(serde_json::from_value) {
Some(Ok(params)) => {
// do as much as possible in a read, then do a write outside
let maybe_prepare_cache_result = {
let inner = self.0.read().await; // ensure dropped
match inner.prepare_cache(params) {
Ok(maybe_cache_result) => maybe_cache_result,
Err(err) => {
self
.0
.read()
.await
.client
.show_message(MessageType::WARNING, err);
return Err(LspError::internal_error());
}
}
};
if let Some(result) = maybe_prepare_cache_result {
let cli_options = result.cli_options;
let roots = result.roots;
let open_docs = result.open_docs;
let handle = spawn(async move {
create_graph_for_caching(cli_options, roots, open_docs).await
});
if let Err(err) = handle.await.unwrap() {
self
.0
.read()
.await
.client
.show_message(MessageType::WARNING, err);
}
// do npm resolution in a write—we should have everything
// cached by this point anyway
self.0.write().await.refresh_npm_specifiers().await;
// now refresh the data in a read
self.0.read().await.post_cache(result.mark).await;
// do as much as possible in a read, then do a write outside
let maybe_prepare_cache_result = {
let inner = self.0.read().await; // ensure dropped
match inner.prepare_cache(specifiers, referrer) {
Ok(maybe_cache_result) => maybe_cache_result,
Err(err) => {
self
.0
.read()
.await
.client
.show_message(MessageType::WARNING, err);
return Err(LspError::internal_error());
}
Ok(Some(json!(true)))
}
Some(Err(err)) => Err(LspError::invalid_params(err.to_string())),
None => Err(LspError::invalid_params("Missing parameters")),
};
if let Some(result) = maybe_prepare_cache_result {
let cli_options = result.cli_options;
let roots = result.roots;
let open_docs = result.open_docs;
let handle = spawn(async move {
create_graph_for_caching(cli_options, roots, open_docs).await
});
if let Err(err) = handle.await.unwrap() {
self
.0
.read()
.await
.client
.show_message(MessageType::WARNING, err);
}
// do npm resolution in a write—we should have everything
// cached by this point anyway
self.0.write().await.refresh_npm_specifiers().await;
// now refresh the data in a read
self.0.read().await.post_cache(result.mark).await;
}
Ok(Some(json!(true)))
}

/// This request is only used by the lsp integration tests to
@@ -396,12 +391,6 @@ impl LanguageServer {
Ok(Some(self.0.read().await.get_performance()))
}

pub async fn reload_import_registries_request(
&self,
) -> LspResult<Option<Value>> {
self.0.write().await.reload_import_registries().await
}

pub async fn task_definitions(&self) -> LspResult<Vec<TaskDefinition>> {
self.0.read().await.task_definitions()
}
@@ -1081,24 +1070,18 @@ impl Inner {
compiler_options_obj.get("jsxImportSource")
{
if let Some(jsx_import_source) = jsx_import_source.as_str() {
let cache_params = lsp_custom::CacheParams {
referrer: TextDocumentIdentifier {
uri: config_file.specifier.clone(),
},
uris: vec![TextDocumentIdentifier {
uri: Url::parse(&format!(
"data:application/typescript;base64,{}",
base64::engine::general_purpose::STANDARD.encode(
format!("import '{jsx_import_source}/jsx-runtime';")
)
))
.unwrap(),
}],
};
let specifiers = vec![Url::parse(&format!(
"data:application/typescript;base64,{}",
base64::engine::general_purpose::STANDARD.encode(format!(
"import '{jsx_import_source}/jsx-runtime';"
))
))
.unwrap()];
let referrer = config_file.specifier.clone();
self.task_queue.queue_task(Box::new(|ls: LanguageServer| {
spawn(async move {
if let Err(err) =
ls.cache_request(Some(json!(cache_params))).await
ls.cache_request(specifiers, referrer).await
{
lsp_warn!("{}", err);
}
@@ -3222,24 +3205,13 @@ impl tower_lsp::LanguageServer for LanguageServer {
) -> LspResult<Option<Value>> {
if params.command == "deno.cache" {
let mut arguments = params.arguments.into_iter();
let uris = serde_json::to_value(arguments.next()).unwrap();
let uris: Vec<Url> = serde_json::from_value(uris)
let specifiers = serde_json::to_value(arguments.next()).unwrap();
let specifiers: Vec<Url> = serde_json::from_value(specifiers)
.map_err(|err| LspError::invalid_params(err.to_string()))?;
let referrer = serde_json::to_value(arguments.next()).unwrap();
let referrer: Url = serde_json::from_value(referrer)
.map_err(|err| LspError::invalid_params(err.to_string()))?;
self
.cache_request(Some(
serde_json::to_value(lsp_custom::CacheParams {
referrer: TextDocumentIdentifier { uri: referrer },
uris: uris
.into_iter()
.map(|uri| TextDocumentIdentifier { uri })
.collect(),
})
.expect("well formed json"),
))
.await
self.cache_request(specifiers, referrer).await
} else if params.command == "deno.reloadImportRegistries" {
self.0.write().await.reload_import_registries().await
} else {
@@ -3421,7 +3393,7 @@ impl tower_lsp::LanguageServer for LanguageServer {

async fn did_save(&self, params: DidSaveTextDocumentParams) {
let uri = &params.text_document.uri;
{
let specifier = {
let mut inner = self.0.write().await;
let specifier = inner.url_map.normalize_url(uri, LspUrlKind::File);
inner.documents.save(&specifier);
@@ -3438,18 +3410,10 @@ impl tower_lsp::LanguageServer for LanguageServer {
Ok(path) if is_importable_ext(&path) => {}
_ => return,
}
}
if let Err(err) = self
.cache_request(Some(
serde_json::to_value(lsp_custom::CacheParams {
referrer: TextDocumentIdentifier { uri: uri.clone() },
uris: vec![TextDocumentIdentifier { uri: uri.clone() }],
})
.unwrap(),
))
.await
{
lsp_warn!("Failed to cache \"{}\" on save: {}", uri.to_string(), err);
specifier
};
if let Err(err) = self.cache_request(vec![], specifier.clone()).await {
lsp_warn!("Failed to cache \"{}\" on save: {}", &specifier, err);
}
}

@@ -3693,22 +3657,17 @@ struct PrepareCacheResult {
impl Inner {
fn prepare_cache(
&self,
params: lsp_custom::CacheParams,
specifiers: Vec<ModuleSpecifier>,
referrer: ModuleSpecifier,
) -> Result<Option<PrepareCacheResult>, AnyError> {
let referrer = self
.url_map
.normalize_url(&params.referrer.uri, LspUrlKind::File);
let mark = self.performance.mark_with_args("lsp.cache", &params);
let roots = if !params.uris.is_empty() {
params
.uris
.iter()
.map(|t| self.url_map.normalize_url(&t.uri, LspUrlKind::File))
.collect()
let mark = self
.performance
.mark_with_args("lsp.cache", (&specifiers, &referrer));
let roots = if !specifiers.is_empty() {
specifiers
} else {
vec![referrer]
};

let workspace_settings = self.config.workspace_settings();
let mut cli_options = CliOptions::new(
Flags {
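As a rough illustration of the new `cache_request(specifiers, referrer)` shape used above, the sketch below parses the two positional `deno.cache` command arguments the same way `execute_command` does, assuming `serde_json` plus the `url` crate with its `serde` feature; the URLs are invented for the example, and the real handler converts errors to `LspError::invalid_params` instead of returning `serde_json::Error`.

```rust
use serde_json::{json, Value};
use url::Url;

// First argument: the specifiers to cache; second argument: the referrer.
fn parse_deno_cache_args(
  arguments: Vec<Value>,
) -> Result<(Vec<Url>, Url), serde_json::Error> {
  let mut arguments = arguments.into_iter();
  let specifiers: Vec<Url> =
    serde_json::from_value(arguments.next().unwrap_or(Value::Null))?;
  let referrer: Url =
    serde_json::from_value(arguments.next().unwrap_or(Value::Null))?;
  Ok((specifiers, referrer))
}

fn main() -> Result<(), serde_json::Error> {
  let (specifiers, referrer) = parse_deno_cache_args(vec![
    json!(["https://deno.land/std/path/mod.ts"]),
    json!("file:///project/main.ts"),
  ])?;
  assert_eq!(specifiers.len(), 1);
  assert_eq!(referrer.as_str(), "file:///project/main.ts");
  // An empty specifier list means "cache the referrer itself", which is
  // how the did_save handler above calls cache_request.
  Ok(())
}
```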
14 changes: 0 additions & 14 deletions cli/lsp/lsp_custom.rs
@@ -4,26 +4,12 @@ use deno_core::serde::Deserialize;
use deno_core::serde::Serialize;
use tower_lsp::lsp_types as lsp;

pub const CACHE_REQUEST: &str = "deno/cache";
pub const PERFORMANCE_REQUEST: &str = "deno/performance";
pub const TASK_REQUEST: &str = "deno/taskDefinitions";
pub const RELOAD_IMPORT_REGISTRIES_REQUEST: &str =
"deno/reloadImportRegistries";
pub const VIRTUAL_TEXT_DOCUMENT: &str = "deno/virtualTextDocument";
pub const LATEST_DIAGNOSTIC_BATCH_INDEX: &str =
"deno/internalLatestDiagnosticBatchIndex";

#[derive(Debug, Deserialize, Serialize)]
#[serde(rename_all = "camelCase")]
pub struct CacheParams {
/// The document currently open in the editor. If there are no `uris`
/// supplied, the referrer will be cached.
pub referrer: lsp::TextDocumentIdentifier,
/// Any documents that have been specifically asked to be cached via the
/// command.
pub uris: Vec<lsp::TextDocumentIdentifier>,
}

#[derive(Debug, Deserialize, Serialize)]
#[serde(rename_all = "camelCase")]
pub struct TaskDefinition {
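To make the protocol change concrete, here is a hedged sketch contrasting the old custom `deno/cache` request params (the `CacheParams` shape removed above) with the positional arguments of the `deno.cache` command that replaces it; the specifier values are made up for illustration.

```rust
use serde_json::json;

fn main() {
  // Old: params for the custom "deno/cache" LSP request, mirroring the
  // camelCase-serialized CacheParams struct deleted in this commit.
  let old_cache_params = json!({
    "referrer": { "uri": "file:///project/main.ts" },
    "uris": [
      { "uri": "https://deno.land/std/path/mod.ts" }
    ]
  });

  // New: positional arguments for the "deno.cache" executeCommand call,
  // matching cache_request(specifiers, referrer).
  let new_cache_arguments = json!([
    ["https://deno.land/std/path/mod.ts"],
    "file:///project/main.ts"
  ]);

  println!("{old_cache_params:#}");
  println!("{new_cache_arguments:#}");
}
```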
10 changes: 0 additions & 10 deletions cli/lsp/mod.rs
@@ -48,20 +48,10 @@ pub async fn start() -> Result<(), AnyError> {
token.clone(),
)
})
// TODO(nayeemrmn): The extension has replaced this with the `deno.cache`
// command as of vscode_deno 3.21.0 / 2023.09.05. Remove this eventually.
.custom_method(lsp_custom::CACHE_REQUEST, LanguageServer::cache_request)
.custom_method(
lsp_custom::PERFORMANCE_REQUEST,
LanguageServer::performance_request,
)
// TODO(nayeemrmn): The extension has replaced this with the
// `deno.reloadImportRegistries` command as of vscode_deno
// 3.26.0 / 2023.10.10. Remove this eventually.
.custom_method(
lsp_custom::RELOAD_IMPORT_REGISTRIES_REQUEST,
LanguageServer::reload_import_registries_request,
)
.custom_method(lsp_custom::TASK_REQUEST, LanguageServer::task_definitions)
// TODO(nayeemrmn): Rename this to `deno/taskDefinitions` in vscode_deno and
// remove this alias.
