Revert "fix: support workspace:^ and workspace:~ version constraints" #27187

Closed
wants to merge 1 commit
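For context, the reverted commit taught Deno's npm-compatible dependency handling to accept the workspace:^ and workspace:~ specifiers that a package.json may use to point at another member of the same workspace; by convention these expand to a caret or tilde range on the member's current version when published. A rough, self-contained sketch of that conventional expansion (illustrative helper, not Deno's implementation):

// Illustrative only: conventional expansion of a `workspace:` specifier when
// the depended-on workspace member is at `member_version` (e.g. on publish).
fn expand_workspace_specifier(
  specifier: &str,
  member_version: &str,
) -> Option<String> {
  match specifier.strip_prefix("workspace:")? {
    "^" => Some(format!("^{member_version}")),
    "~" => Some(format!("~{member_version}")),
    "*" => Some(member_version.to_string()),
    explicit => Some(explicit.to_string()), // e.g. "workspace:1.2.3"
  }
}

fn main() {
  assert_eq!(
    expand_workspace_specifier("workspace:^", "1.2.3").as_deref(),
    Some("^1.2.3")
  );
  assert_eq!(
    expand_workspace_specifier("workspace:~", "1.2.3").as_deref(),
    Some("~1.2.3")
  );
}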
229 changes: 91 additions & 138 deletions Cargo.lock

Large diffs are not rendered by default.

14 changes: 7 additions & 7 deletions Cargo.toml
@@ -46,18 +46,18 @@ license = "MIT"
 repository = "https://github.com/denoland/deno"
 
 [workspace.dependencies]
-deno_ast = { version = "=0.44.0", features = ["transpiling"] }
+deno_ast = { version = "=0.43.3", features = ["transpiling"] }
 deno_core = { version = "0.323.0" }
 
 deno_bench_util = { version = "0.174.0", path = "./bench_util" }
-deno_config = { version = "=0.39.3", features = ["workspace", "sync"] }
-deno_lockfile = "=0.23.2"
+deno_config = { version = "=0.39.2", features = ["workspace", "sync"] }
+deno_lockfile = "=0.23.1"
 deno_media_type = { version = "0.2.0", features = ["module_specifier"] }
-deno_npm = "=0.25.5"
+deno_npm = "=0.25.4"
 deno_path_util = "=0.2.1"
 deno_permissions = { version = "0.40.0", path = "./runtime/permissions" }
 deno_runtime = { version = "0.189.0", path = "./runtime" }
-deno_semver = "=0.6.0"
+deno_semver = "=0.5.16"
 deno_terminal = "0.2.0"
 napi_sym = { version = "0.110.0", path = "./ext/napi/sym" }
 test_util = { package = "test_server", path = "./tests/util/server" }
@@ -115,8 +115,8 @@ console_static_text = "=0.8.1"
 dashmap = "5.5.3"
 data-encoding = "2.3.3"
 data-url = "=0.3.0"
-deno_cache_dir = "=0.14.0"
-deno_package_json = { version = "0.2.1", default-features = false }
+deno_cache_dir = "=0.13.2"
+deno_package_json = { version = "0.1.2", default-features = false }
 dlopen2 = "0.6.1"
 ecb = "=0.1.2"
 elliptic-curve = { version = "0.13.4", features = ["alloc", "arithmetic", "ecdh", "std", "pem", "jwk"] }
8 changes: 4 additions & 4 deletions cli/Cargo.toml
@@ -72,9 +72,9 @@ deno_ast = { workspace = true, features = ["bundler", "cjs", "codegen", "proposa
 deno_cache_dir.workspace = true
 deno_config.workspace = true
 deno_core = { workspace = true, features = ["include_js_files_for_snapshotting"] }
-deno_doc = { version = "=0.161.2", features = ["rust", "comrak"] }
-deno_graph = { version = "=0.86.3" }
-deno_lint = { version = "=0.68.1", features = ["docs"] }
+deno_doc = { version = "=0.161.1", features = ["rust", "comrak"] }
+deno_graph = { version = "=0.86.2" }
+deno_lint = { version = "=0.68.0", features = ["docs"] }
 deno_lockfile.workspace = true
 deno_npm.workspace = true
 deno_package_json.workspace = true
@@ -108,7 +108,7 @@ dotenvy = "0.15.7"
 dprint-plugin-json = "=0.19.4"
 dprint-plugin-jupyter = "=0.1.5"
 dprint-plugin-markdown = "=0.17.8"
-dprint-plugin-typescript = "=0.93.3"
+dprint-plugin-typescript = "=0.93.2"
 env_logger = "=0.10.0"
 fancy-regex = "=0.10.0"
 faster-hex.workspace = true
4 changes: 3 additions & 1 deletion cli/args/deno_json.rs
@@ -18,10 +18,12 @@ impl<'a> deno_config::fs::DenoConfigFs for DenoConfigFsAdapter<'a> {
   fn read_to_string_lossy(
     &self,
     path: &std::path::Path,
-  ) -> Result<std::borrow::Cow<'static, str>, std::io::Error> {
+  ) -> Result<String, std::io::Error> {
     self
       .0
       .read_text_file_lossy_sync(path, None)
+      // todo(https://github.com/denoland/deno_config/pull/140): avoid clone
+      .map(|s| s.into_owned())
       .map_err(|err| err.into_io_error())
   }
 
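Note on the into_owned() additions: this revert (here, and in cli/cache/mod.rs and the LSP files below) moves trait methods that had started returning Cow<'static, str> / Cow<'static, [u8]> back to owned String / Vec<u8>, so the adapters regain a clone plus a todo(...) comment tracking its removal. A minimal std-only sketch of the trade-off, not Deno's actual trait:

use std::borrow::Cow;

// A reader that can hand back borrowed data without allocating.
fn read_lossy() -> Cow<'static, str> {
  Cow::Borrowed("cached config contents")
}

// The reverted signature returns an owned String, so the borrowed case
// must be cloned via `into_owned()`.
fn read_lossy_owned() -> String {
  read_lossy().into_owned() // allocates whenever the Cow was Borrowed
}

fn main() {
  assert_eq!(read_lossy_owned(), "cached config contents");
}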
7 changes: 2 additions & 5 deletions cli/args/lockfile.rs
@@ -109,12 +109,9 @@ impl CliLockfile {
       let Some(pkg_json) = maybe_pkg_json else {
         return Default::default();
       };
-      let deps = pkg_json.resolve_local_package_json_deps();
-
-      deps
-        .dependencies
+      pkg_json
+        .resolve_local_package_json_deps()
         .values()
-        .chain(deps.dev_dependencies.values())
         .filter_map(|dep| dep.as_ref().ok())
         .filter_map(|dep| match dep {
           PackageJsonDepValue::Req(req) => {
23 changes: 3 additions & 20 deletions cli/args/package_json.rs
@@ -8,10 +8,8 @@ use deno_core::serde_json;
 use deno_core::url::Url;
 use deno_package_json::PackageJsonDepValue;
 use deno_package_json::PackageJsonDepValueParseError;
-use deno_package_json::PackageJsonDepWorkspaceReq;
 use deno_semver::npm::NpmPackageReqReference;
 use deno_semver::package::PackageReq;
-use deno_semver::VersionReq;
 use thiserror::Error;
 
 #[derive(Debug)]
@@ -97,14 +95,8 @@ impl NpmInstallDepsProvider {
 
       if let Some(pkg_json) = &folder.pkg_json {
         let deps = pkg_json.resolve_local_package_json_deps();
-        let mut pkg_pkgs = Vec::with_capacity(
-          deps.dependencies.len() + deps.dev_dependencies.len(),
-        );
-        for (alias, dep) in deps
-          .dependencies
-          .into_iter()
-          .chain(deps.dev_dependencies.into_iter())
-        {
+        let mut pkg_pkgs = Vec::with_capacity(deps.len());
+        for (alias, dep) in deps {
           let dep = match dep {
             Ok(dep) => dep,
             Err(err) => {
@@ -139,16 +131,7 @@ impl NpmInstallDepsProvider {
                 });
               }
             }
-            PackageJsonDepValue::Workspace(workspace_version_req) => {
-              let version_req = match workspace_version_req {
-                PackageJsonDepWorkspaceReq::VersionReq(version_req) => {
-                  version_req
-                }
-                PackageJsonDepWorkspaceReq::Tilde
-                | PackageJsonDepWorkspaceReq::Caret => {
-                  VersionReq::parse_from_npm("*").unwrap()
-                }
-              };
+            PackageJsonDepValue::Workspace(version_req) => {
               if let Some(pkg) = workspace_npm_pkgs.iter().find(|pkg| {
                 pkg.matches_name_and_version_req(&alias, &version_req)
               }) {
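The removed Workspace arm above is where the reverted feature mapped workspace:~ and workspace:^ onto an installable requirement, collapsing both to a wildcard when matching workspace members. A rough sketch of that mapping with stand-in types (not deno_package_json's or deno_semver's actual API):

// Stand-ins for PackageJsonDepWorkspaceReq and a semver requirement string,
// only to illustrate the mapping performed by the removed match arm.
enum WorkspaceReq {
  VersionReq(String), // e.g. "workspace:1.2.3" carries an explicit requirement
  Tilde,              // "workspace:~"
  Caret,              // "workspace:^"
}

fn to_version_req(req: WorkspaceReq) -> String {
  match req {
    WorkspaceReq::VersionReq(version_req) => version_req,
    // The removed code accepted any matching workspace member version here,
    // i.e. the equivalent of VersionReq::parse_from_npm("*").
    WorkspaceReq::Tilde | WorkspaceReq::Caret => "*".to_string(),
  }
}

fn main() {
  assert_eq!(to_version_req(WorkspaceReq::Caret), "*");
  assert_eq!(
    to_version_req(WorkspaceReq::VersionReq("^1.2.3".into())),
    "^1.2.3"
  );
}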
15 changes: 5 additions & 10 deletions cli/cache/mod.rs
@@ -23,7 +23,6 @@ use deno_graph::source::Loader;
 use deno_runtime::deno_fs;
 use deno_runtime::deno_permissions::PermissionsContainer;
 use node_resolver::InNpmPackageChecker;
-use std::borrow::Cow;
 use std::collections::HashMap;
 use std::path::Path;
 use std::path::PathBuf;
@@ -68,11 +67,8 @@ pub const CACHE_PERM: u32 = 0o644;
 pub struct RealDenoCacheEnv;
 
 impl deno_cache_dir::DenoCacheEnv for RealDenoCacheEnv {
-  fn read_file_bytes(
-    &self,
-    path: &Path,
-  ) -> std::io::Result<Cow<'static, [u8]>> {
-    std::fs::read(path).map(Cow::Owned)
+  fn read_file_bytes(&self, path: &Path) -> std::io::Result<Vec<u8>> {
+    std::fs::read(path)
   }
 
   fn atomic_write_file(
@@ -116,13 +112,12 @@ pub struct DenoCacheEnvFsAdapter<'a>(
 );
 
 impl<'a> deno_cache_dir::DenoCacheEnv for DenoCacheEnvFsAdapter<'a> {
-  fn read_file_bytes(
-    &self,
-    path: &Path,
-  ) -> std::io::Result<Cow<'static, [u8]>> {
+  fn read_file_bytes(&self, path: &Path) -> std::io::Result<Vec<u8>> {
     self
       .0
       .read_file_sync(path, None)
+      // todo(https://github.com/denoland/deno_cache_dir/pull/66): avoid clone
+      .map(|bytes| bytes.into_owned())
       .map_err(|err| err.into_io_error())
   }
 
7 changes: 1 addition & 6 deletions cli/factory.rs
@@ -504,12 +504,7 @@ impl CliFactory {
     let resolver = cli_options
       .create_workspace_resolver(
         self.file_fetcher()?,
-        if cli_options.use_byonm()
-          && !matches!(
-            cli_options.sub_command(),
-            DenoSubcommand::Publish(_)
-          )
-        {
+        if cli_options.use_byonm() {
           PackageJsonDepResolution::Disabled
         } else {
           // todo(dsherret): this should be false for nodeModulesDir: true
2 changes: 1 addition & 1 deletion cli/file_fetcher.rs
@@ -1540,7 +1540,7 @@ mod tests {
       .unwrap()
       .unwrap()
       .content;
-    String::from_utf8(bytes.into_owned()).unwrap()
+    String::from_utf8(bytes).unwrap()
   }
 
   #[track_caller]
5 changes: 2 additions & 3 deletions cli/lsp/config.rs
@@ -41,7 +41,6 @@ use deno_path_util::url_to_file_path;
 use deno_runtime::deno_node::PackageJson;
 use indexmap::IndexSet;
 use lsp_types::ClientCapabilities;
-use std::borrow::Cow;
 use std::collections::BTreeMap;
 use std::collections::BTreeSet;
 use std::collections::HashMap;
@@ -2093,7 +2092,7 @@ impl<T: Clone> CachedFsItems<T> {
 #[derive(Default)]
 struct InnerData {
   stat_calls: CachedFsItems<deno_config::fs::FsMetadata>,
-  read_to_string_calls: CachedFsItems<Cow<'static, str>>,
+  read_to_string_calls: CachedFsItems<String>,
 }
 
 #[derive(Default)]
@@ -2114,7 +2113,7 @@ impl DenoConfigFs for CachedDenoConfigFs {
   fn read_to_string_lossy(
     &self,
     path: &Path,
-  ) -> Result<Cow<'static, str>, std::io::Error> {
+  ) -> Result<String, std::io::Error> {
     self
       .0
       .lock()
2 changes: 1 addition & 1 deletion cli/lsp/documents.rs
@@ -925,7 +925,7 @@ impl FileSystemDocuments {
       let content = bytes_to_content(
         specifier,
         media_type,
-        cached_file.content.into_owned(),
+        cached_file.content,
         maybe_charset,
       )
       .ok()?;
2 changes: 1 addition & 1 deletion cli/lsp/jsr.rs
@@ -262,7 +262,7 @@ fn read_cached_url(
   cache
     .get(&cache.cache_item_key(url).ok()?, None)
     .ok()?
-    .map(|f| f.content.into_owned())
+    .map(|f| f.content)
 }
 
 #[derive(Debug)]
62 changes: 49 additions & 13 deletions cli/tools/registry/diagnostics.rs
@@ -169,7 +169,7 @@ impl Diagnostic for PublishDiagnostic {
         ..
       }) => DiagnosticLevel::Warning,
       FastCheck(_) => DiagnosticLevel::Error,
-      SpecifierUnfurl(d) => d.level(),
+      SpecifierUnfurl(_) => DiagnosticLevel::Warning,
       InvalidPath { .. } => DiagnosticLevel::Error,
       DuplicatePath { .. } => DiagnosticLevel::Error,
       UnsupportedFileType { .. } => DiagnosticLevel::Warning,
@@ -187,7 +187,7 @@ impl Diagnostic for PublishDiagnostic {
     use PublishDiagnostic::*;
     match &self {
       FastCheck(diagnostic) => diagnostic.code(),
-      SpecifierUnfurl(diagnostic) => diagnostic.code(),
+      SpecifierUnfurl(diagnostic) => Cow::Borrowed(diagnostic.code()),
       InvalidPath { .. } => Cow::Borrowed("invalid-path"),
       DuplicatePath { .. } => Cow::Borrowed("case-insensitive-duplicate-path"),
       UnsupportedFileType { .. } => Cow::Borrowed("unsupported-file-type"),
@@ -207,7 +207,7 @@ impl Diagnostic for PublishDiagnostic {
     use PublishDiagnostic::*;
     match &self {
       FastCheck(diagnostic) => diagnostic.message(),
-      SpecifierUnfurl(diagnostic) => diagnostic.message(),
+      SpecifierUnfurl(diagnostic) => Cow::Borrowed(diagnostic.message()),
       InvalidPath { message, .. } => Cow::Borrowed(message.as_str()),
       DuplicatePath { .. } => {
         Cow::Borrowed("package path is a case insensitive duplicate of another path in the package")
@@ -243,7 +243,17 @@ impl Diagnostic for PublishDiagnostic {
     use PublishDiagnostic::*;
     match &self {
       FastCheck(diagnostic) => diagnostic.location(),
-      SpecifierUnfurl(diagnostic) => diagnostic.location(),
+      SpecifierUnfurl(diagnostic) => match diagnostic {
+        SpecifierUnfurlerDiagnostic::UnanalyzableDynamicImport {
+          specifier,
+          text_info,
+          range,
+        } => DiagnosticLocation::ModulePosition {
+          specifier: Cow::Borrowed(specifier),
+          text_info: Cow::Borrowed(text_info),
+          source_pos: DiagnosticSourcePos::SourcePos(range.start),
+        },
+      },
       InvalidPath { path, .. } => {
         DiagnosticLocation::Path { path: path.clone() }
       }
@@ -315,8 +325,24 @@ impl Diagnostic for PublishDiagnostic {
 
     use PublishDiagnostic::*;
     match &self {
-      FastCheck(d) => d.snippet(),
-      SpecifierUnfurl(d) => d.snippet(),
+      FastCheck(diagnostic) => diagnostic.snippet(),
+      SpecifierUnfurl(diagnostic) => match diagnostic {
+        SpecifierUnfurlerDiagnostic::UnanalyzableDynamicImport {
+          text_info,
+          range,
+          ..
+        } => Some(DiagnosticSnippet {
+          source: Cow::Borrowed(text_info),
+          highlights: vec![DiagnosticSnippetHighlight {
+            style: DiagnosticSnippetHighlightStyle::Warning,
+            range: DiagnosticSourceRange {
+              start: DiagnosticSourcePos::SourcePos(range.start),
+              end: DiagnosticSourcePos::SourcePos(range.end),
+            },
+            description: Some("the unanalyzable dynamic import".into()),
+          }],
+        }),
+      },
       InvalidPath { .. } => None,
       DuplicatePath { .. } => None,
       UnsupportedFileType { .. } => None,
@@ -354,7 +380,7 @@ impl Diagnostic for PublishDiagnostic {
     use PublishDiagnostic::*;
     match &self {
       FastCheck(diagnostic) => diagnostic.hint(),
-      SpecifierUnfurl(d) => d.hint(),
+      SpecifierUnfurl(_) => None,
       InvalidPath { .. } => Some(
         Cow::Borrowed("rename or remove the file, or add it to 'publish.exclude' in the config file"),
       ),
@@ -410,9 +436,9 @@ impl Diagnostic for PublishDiagnostic {
           None => None,
         }
       }
-      SyntaxError(d) => d.snippet_fixed(),
-      SpecifierUnfurl(d) => d.snippet_fixed(),
+      SyntaxError(diagnostic) => diagnostic.snippet_fixed(),
       FastCheck(_)
+      | SpecifierUnfurl(_)
      | InvalidPath { .. }
      | DuplicatePath { .. }
      | UnsupportedFileType { .. }
@@ -427,8 +453,16 @@ impl Diagnostic for PublishDiagnostic {
   fn info(&self) -> Cow<'_, [Cow<'_, str>]> {
     use PublishDiagnostic::*;
     match &self {
-      FastCheck(d) => d.info(),
-      SpecifierUnfurl(d) => d.info(),
+      FastCheck(diagnostic) => {
+        diagnostic.info()
+      }
+      SpecifierUnfurl(diagnostic) => match diagnostic {
+        SpecifierUnfurlerDiagnostic::UnanalyzableDynamicImport { .. } => Cow::Borrowed(&[
+          Cow::Borrowed("after publishing this package, imports from the local import map / package.json do not work"),
+          Cow::Borrowed("dynamic imports that can not be analyzed at publish time will not be rewritten automatically"),
+          Cow::Borrowed("make sure the dynamic import is resolvable at runtime without an import map / package.json")
+        ]),
+      },
       InvalidPath { .. } => Cow::Borrowed(&[
         Cow::Borrowed("to portably support all platforms, including windows, the allowed characters in package paths are limited"),
       ]),
@@ -469,8 +503,10 @@ impl Diagnostic for PublishDiagnostic {
   fn docs_url(&self) -> Option<Cow<'_, str>> {
     use PublishDiagnostic::*;
     match &self {
-      FastCheck(d) => d.docs_url(),
-      SpecifierUnfurl(d) => d.docs_url(),
+      FastCheck(diagnostic) => diagnostic.docs_url(),
+      SpecifierUnfurl(diagnostic) => match diagnostic {
+        SpecifierUnfurlerDiagnostic::UnanalyzableDynamicImport { .. } => None,
+      },
       InvalidPath { .. } => {
         Some(Cow::Borrowed("https://jsr.io/go/invalid-path"))
       }
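The diagnostics hunks above trade direct delegation to the wrapped SpecifierUnfurlerDiagnostic (d.level(), d.code(), and so on) back for inline per-variant matches. A compact sketch of the delegation pattern being reverted, with illustrative types rather than Deno's actual Diagnostic trait:

trait Diagnostic {
  fn code(&self) -> &'static str;
}

struct UnfurlDiagnostic;

impl Diagnostic for UnfurlDiagnostic {
  fn code(&self) -> &'static str {
    "unanalyzable-dynamic-import"
  }
}

enum PublishDiagnostic {
  SpecifierUnfurl(UnfurlDiagnostic),
}

impl Diagnostic for PublishDiagnostic {
  fn code(&self) -> &'static str {
    match self {
      // Pre-revert style: forward to the wrapped diagnostic; the revert
      // restores inline logic for each variant instead.
      PublishDiagnostic::SpecifierUnfurl(d) => d.code(),
    }
  }
}

fn main() {
  let d = PublishDiagnostic::SpecifierUnfurl(UnfurlDiagnostic);
  assert_eq!(d.code(), "unanalyzable-dynamic-import");
}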