diff --git a/Cargo.lock b/Cargo.lock
index 427dc592184b26..2b333d6eaa77d6 100644
--- a/Cargo.lock
+++ b/Cargo.lock
@@ -417,6 +417,7 @@ dependencies = [
  "nix",
  "notify",
  "os_pipe",
+ "percent-encoding",
  "rand 0.7.3",
  "regex",
  "reqwest",
diff --git a/cli/Cargo.toml b/cli/Cargo.toml
index 5a969b785fefb6..f5be543a167d5a 100644
--- a/cli/Cargo.toml
+++ b/cli/Cargo.toml
@@ -52,6 +52,7 @@ libc = "0.2.74"
 log = "0.4.11"
 env_logger = "0.7.1"
 notify = "5.0.0-pre.3"
+percent-encoding = "2.1.0"
 rand = "0.7.3"
 regex = "1.3.9"
 reqwest = { version = "0.10.7", default-features = false, features = ["rustls-tls", "stream", "gzip", "brotli"] }
diff --git a/cli/disk_cache.rs b/cli/disk_cache.rs
index d6fb9866c242f1..37aad764e8a4a6 100644
--- a/cli/disk_cache.rs
+++ b/cli/disk_cache.rs
@@ -103,6 +103,9 @@ impl DiskCache {
         out = out.join(remaining_components);
       }
+      "data" => {
+        out.push(crate::checksum::gen(&[url.as_str().as_bytes()]));
+      }
       scheme => {
         unimplemented!(
           "Don't know how to create cache name for scheme: {}",
diff --git a/cli/file_fetcher.rs b/cli/file_fetcher.rs
index 8b3ca46a007b49..f6400027aff5fe 100644
--- a/cli/file_fetcher.rs
+++ b/cli/file_fetcher.rs
@@ -100,7 +100,7 @@ impl SourceFileCache {
   }
 }
 
-const SUPPORTED_URL_SCHEMES: [&str; 3] = ["http", "https", "file"];
+const SUPPORTED_URL_SCHEMES: [&str; 4] = ["http", "https", "file", "data"];
 
 #[derive(Clone)]
 pub struct SourceFileFetcher {
@@ -278,6 +278,7 @@
   ) -> Result<Option<SourceFile>, ErrBox> {
     let url_scheme = module_url.scheme();
     let is_local_file = url_scheme == "file";
+    let is_data_url = url_scheme == "data";
     SourceFileFetcher::check_if_supported_scheme(&module_url)?;
 
     // Local files are always fetched from disk bypassing cache entirely.
@@ -285,6 +286,10 @@ impl SourceFileFetcher {
       return self.fetch_local_file(&module_url, permissions).map(Some);
     }
 
+    if is_data_url {
+      return extract_data_url(module_url).map(Some);
+    }
+
     self.fetch_cached_remote_source(&module_url, 10)
   }
@@ -309,6 +314,7 @@
   ) -> Result<SourceFile, ErrBox> {
     let url_scheme = module_url.scheme();
     let is_local_file = url_scheme == "file";
+    let is_data_url = url_scheme == "data";
     SourceFileFetcher::check_if_supported_scheme(&module_url)?;
 
     // Local files are always fetched from disk bypassing cache entirely.
@@ -316,6 +322,10 @@ impl SourceFileFetcher {
       return self.fetch_local_file(&module_url, permissions);
     }
 
+    if is_data_url {
+      return extract_data_url(module_url);
+    }
+
     // The file is remote, fail if `no_remote` is true.
     if no_remote {
       let e = std::io::Error::new(
@@ -552,6 +562,36 @@ impl SourceFileFetcher {
   }
 }
 
+fn extract_data_url(url: &Url) -> Result<SourceFile, ErrBox> {
+  assert_eq!(url.scheme(), "data");
+  let url_content = &url.as_str()[5..];
+  let mut part_iterator = url_content.splitn(2, ',');
+
+  let media_type_str = part_iterator.next().unwrap();
+  let data = if let Some(d) = part_iterator.next() {
+    d
+  } else {
+    return Err(ErrBox::new("URIError", "Malformed data url, missing comma"));
+  };
+
+  let filename = PathBuf::new();
+  let (media_type, charset) = map_content_type(&filename, Some(media_type_str));
+  let is_base64 = media_type_str.rsplit(';').any(|v| v == "base64");
+  let bytes = if is_base64 {
+    base64::decode(data)?
+  } else {
+    percent_encoding::percent_decode_str(data).collect::<Vec<u8>>()
+  };
+
+  Ok(SourceFile {
+    url: url.clone(),
+    filename,
+    types_header: None,
+    media_type,
+    source_code: TextDocument::new(bytes, charset),
+  })
+}
+
 pub fn map_file_extension(path: &Path) -> msg::MediaType {
   match path.extension() {
     None => msg::MediaType::Unknown,
diff --git a/cli/global_state.rs b/cli/global_state.rs
index 2dfec4a72274ae..9bebb7bc5bf5b7 100644
--- a/cli/global_state.rs
+++ b/cli/global_state.rs
@@ -118,6 +118,7 @@ impl GlobalState {
       self.file_fetcher.clone(),
       maybe_import_map,
       permissions.clone(),
+      self.flags.unstable,
       is_dyn_import,
       false,
     );
diff --git a/cli/info.rs b/cli/info.rs
index c876c57d5b1eb2..30a6d2ba127b7e 100644
--- a/cli/info.rs
+++ b/cli/info.rs
@@ -34,6 +34,7 @@ impl ModuleDepInfo {
       global_state.file_fetcher.clone(),
       global_state.maybe_import_map.clone(),
       Permissions::allow_all(),
+      global_state.flags.unstable,
       false,
       true,
     );
diff --git a/cli/main.rs b/cli/main.rs
index 0d55ea1dfa4410..6079974b75e570 100644
--- a/cli/main.rs
+++ b/cli/main.rs
@@ -478,6 +478,7 @@ async fn run_with_watch(flags: Flags, script: String) -> Result<(), ErrBox> {
     global_state.file_fetcher.clone(),
     global_state.maybe_import_map.clone(),
     Permissions::allow_all(),
+    flags.unstable,
     false,
     false,
   );
diff --git a/cli/module_graph.rs b/cli/module_graph.rs
index 40147c44c1c00c..7c96c2a0a8d8ad 100644
--- a/cli/module_graph.rs
+++ b/cli/module_graph.rs
@@ -7,6 +7,7 @@ use crate::file_fetcher::SourceFileFetcher;
 use crate::import_map::ImportMap;
 use crate::msg::MediaType;
 use crate::permissions::Permissions;
+use crate::state::exit_unstable;
 use crate::swc_util::Location;
 use crate::tsc::pre_process_file;
 use crate::tsc::ImportDesc;
@@ -44,20 +45,31 @@ fn err_with_location(e: ErrBox, maybe_location: Option<&Location>) -> ErrBox {
 }
 
 /// Disallow http:// imports from modules loaded over https://
+/// Disallow any imports from modules loaded with data:
 fn validate_no_downgrade(
   module_specifier: &ModuleSpecifier,
   maybe_referrer: Option<&ModuleSpecifier>,
   maybe_location: Option<&Location>,
 ) -> Result<(), ErrBox> {
   if let Some(referrer) = maybe_referrer.as_ref() {
-    if let "https" = referrer.as_url().scheme() {
-      if let "http" = module_specifier.as_url().scheme() {
-        let e = ErrBox::new("PermissionDenied",
-          "Modules loaded over https:// are not allowed to import modules over http://"
+    match referrer.as_url().scheme() {
+      "https" => {
+        if let "http" = module_specifier.as_url().scheme() {
+          let e = ErrBox::new("PermissionDenied",
+            "Modules loaded over https:// are not allowed to import modules over http://"
+          );
+          return Err(err_with_location(e, maybe_location));
+        };
+      }
+      "data" => {
+        let e = ErrBox::new(
+          "PermissionDenied",
+          "Modules loaded using data URL are not allowed to import other modules",
         );
         return Err(err_with_location(e, maybe_location));
-      };
-    };
+      }
+      _ => {}
+    }
   };
 
   Ok(())
@@ -75,7 +87,7 @@ fn validate_no_file_from_remote(
     "http" | "https" => {
       let specifier_url = module_specifier.as_url();
       match specifier_url.scheme() {
-        "http" | "https" => {}
+        "http" | "https" | "data" => {}
         _ => {
           let e = ErrBox::new(
             "PermissionDenied",
@@ -257,6 +269,7 @@ pub struct ModuleGraphLoader {
   pending_downloads: FuturesUnordered<SourceFileFuture>,
   has_downloaded: HashSet<ModuleSpecifier>,
   graph: ModuleGraph,
+  is_unstable: bool,
   is_dyn_import: bool,
   analyze_dynamic_imports: bool,
 }
@@ -266,6 +279,7 @@ impl ModuleGraphLoader {
     file_fetcher: SourceFileFetcher,
     maybe_import_map: Option<ImportMap>,
     permissions: Permissions,
+    is_unstable: bool,
     is_dyn_import: bool,
     analyze_dynamic_imports: bool,
   ) -> Self {
@@ -276,6 +290,7 @@ impl ModuleGraphLoader {
       pending_downloads: FuturesUnordered::new(),
       has_downloaded: HashSet::new(),
       graph: ModuleGraph::new(),
+      is_unstable,
       is_dyn_import,
       analyze_dynamic_imports,
     }
   }
@@ -405,6 +420,10 @@ impl ModuleGraphLoader {
       return Ok(());
     }
 
+    if !self.is_unstable && module_specifier.as_url().scheme() == "data" {
+      exit_unstable("data imports");
+    }
+
     validate_no_downgrade(
       &module_specifier,
       maybe_referrer.as_ref(),
@@ -600,6 +619,7 @@ mod tests {
       global_state.file_fetcher.clone(),
       None,
       Permissions::allow_all(),
+      global_state.flags.unstable,
       false,
       false,
     );
@@ -873,7 +893,7 @@ fn test_pre_process_file() {
   let source = r#"
 // This comment is placed to make sure that directives are parsed
 // even when they start on non-first line
-  
+
 /// <reference path="./type_definitions/foo.d.ts" />
 /// <reference path="./type_definitions/fizz.d.ts" />
 ///
@@ -888,7 +908,7 @@ import * as qat from "./type_definitions/qat.ts";
 
 console.log(foo);
 console.log(fizz);
-console.log(qat.qat); 
+console.log(qat.qat);
 "#;
 
   let (imports, references) =
diff --git a/cli/state.rs b/cli/state.rs
index 3df4ffb3bffa28..17295cecdbc363 100644
--- a/cli/state.rs
+++ b/cli/state.rs
@@ -300,6 +300,7 @@ impl State {
         self.check_read(Path::new(&path))?;
         Ok(())
       }
+      "data" => Ok(()),
       _ => unreachable!(),
     }
   }
diff --git a/cli/tests/data_import_invalid.js b/cli/tests/data_import_invalid.js
new file mode 100644
index 00000000000000..a7dce85c71ccb4
--- /dev/null
+++ b/cli/tests/data_import_invalid.js
@@ -0,0 +1 @@
+import _invalid from "data:";
diff --git a/cli/tests/data_import_invalid.out b/cli/tests/data_import_invalid.out
new file mode 100644
index 00000000000000..2dfc748fd06371
--- /dev/null
+++ b/cli/tests/data_import_invalid.out
@@ -0,0 +1,2 @@
+error: Malformed data url, missing comma
+Imported from [WILDCARD]
\ No newline at end of file
diff --git a/cli/tests/data_import_origin_upgrade.js b/cli/tests/data_import_origin_upgrade.js
new file mode 100644
index 00000000000000..5d6c297154bb68
--- /dev/null
+++ b/cli/tests/data_import_origin_upgrade.js
@@ -0,0 +1,2 @@
+// export default from "https://deno.land/std/version.ts";
+import _upgrade from "data:application/javascript;base64,ZXhwb3J0IGRlZmF1bHQgZnJvbSAiaHR0cHM6Ly9kZW5vLmxhbmQvc3RkL3ZlcnNpb24udHMiOw==";
diff --git a/cli/tests/data_import_origin_upgrade.out b/cli/tests/data_import_origin_upgrade.out
new file mode 100644
index 00000000000000..00c4052c9fb8bd
--- /dev/null
+++ b/cli/tests/data_import_origin_upgrade.out
@@ -0,0 +1,2 @@
+error: Modules loaded using data URL are not allowed to import other modules
+Imported from [WILDCARD]
\ No newline at end of file
diff --git a/cli/tests/data_import_test.js b/cli/tests/data_import_test.js
new file mode 100644
index 00000000000000..bbbc67743859f7
--- /dev/null
+++ b/cli/tests/data_import_test.js
@@ -0,0 +1,59 @@
+import { assertEquals } from "../../std/testing/asserts.ts";
+
+// export const value = 'Successful import'; export default value;
+import data1 from "data:application/javascript;base64,ZXhwb3J0IGNvbnN0IHZhbHVlID0gJ1N1Y2Nlc3NmdWwgaW1wb3J0JzsgZXhwb3J0IGRlZmF1bHQgdmFsdWU7";
+
+Deno.test("static base64 data url import", () => {
+  assertEquals(data1, "Successful import");
+});
+
+Deno.test("dynamic base64 data url import", async () => {
+  const data2 = await import(
+    // export const leet = 1337
+    "data:application/javascript;base64,ZXhwb3J0IGNvbnN0IGxlZXQgPSAxMzM3"
+  );
+  assertEquals(data2.leet, 1337);
+});
+
+Deno.test("dynamic percent-encoding data url import", async () => {
+  const data3 = await import(
+    // export const value = 42;
"data:application/javascript,export%20const%20value%20%3D%2042%3B" + ); + assertEquals(data3.value, 42); +}); + +Deno.test("dynamic base64 typescript data url import", async () => { + const data2 = await import( + // export const leet: number = 1337; + "data:application/typescript;base64,ZXhwb3J0IGNvbnN0IGxlZXQ6IG51bWJlciA9IDEzMzc7" + ); + assertEquals(data2.leet, 1337); +}); + +Deno.test("spawn worker with data url", async () => { + let resolve, timeout; + const promise = new Promise((res, rej) => { + resolve = res; + timeout = setTimeout(() => rej("Worker timed out"), 2000); + }); + + const worker = new Worker( + "data:application/javascript," + + encodeURIComponent("self.onmessage = () => self.postMessage('Worker');"), + { type: "module" }, + ); + + worker.onmessage = (m) => { + if (m.data === "Worker") { + resolve(); + } + }; + + worker.postMessage(); + + await promise; + + clearTimeout(timeout); + worker.terminate(); +}); diff --git a/cli/tests/data_import_test.out b/cli/tests/data_import_test.out new file mode 100644 index 00000000000000..2c1f33eea341b8 --- /dev/null +++ b/cli/tests/data_import_test.out @@ -0,0 +1,3 @@ +[WILDCARD] +test result: ok. 5 passed; [WILDCARD] + diff --git a/cli/tests/integration_tests.rs b/cli/tests/integration_tests.rs index ec13f73805b092..dac257e8b215a5 100644 --- a/cli/tests/integration_tests.rs +++ b/cli/tests/integration_tests.rs @@ -2394,6 +2394,23 @@ itest!(info_type_import { output: "info_type_import.out", }); +itest!(data_import { + args: "test --reload --unstable data_import_test.js", + output: "data_import_test.out", +}); + +itest!(data_import_invalid { + args: "test --reload --unstable data_import_invalid.js", + output: "data_import_invalid.out", + exit_code: 1, +}); + +itest!(data_import_origin_upgrade { + args: "test --reload --unstable data_import_origin_upgrade.js", + output: "data_import_origin_upgrade.out", + exit_code: 1, +}); + #[test] fn cafile_env_fetch() { use url::Url; diff --git a/cli/tests/unsupported_dynamic_import_scheme.out b/cli/tests/unsupported_dynamic_import_scheme.out index 2a1a4e01f02b78..8a7dba2b94329b 100644 --- a/cli/tests/unsupported_dynamic_import_scheme.out +++ b/cli/tests/unsupported_dynamic_import_scheme.out @@ -2,4 +2,5 @@ error: Uncaught TypeError: Unsupported scheme "xxx" for module "xxx:". Supported "http", "https", "file", + "data", ] diff --git a/cli/tsc.rs b/cli/tsc.rs index d78485fd3b83fd..6b238ea36532c2 100644 --- a/cli/tsc.rs +++ b/cli/tsc.rs @@ -679,6 +679,7 @@ impl TsCompiler { self.file_fetcher.clone(), global_state.maybe_import_map.clone(), permissions.clone(), + global_state.flags.unstable, false, true, ); @@ -1157,6 +1158,7 @@ async fn create_runtime_module_graph( global_state.file_fetcher.clone(), None, permissions, + global_state.flags.unstable, false, false, ); @@ -1676,6 +1678,7 @@ mod tests { file_fetcher.clone(), None, Permissions::allow_all(), + mock_state.flags.unstable, false, false, ); @@ -1752,6 +1755,7 @@ mod tests { file_fetcher.clone(), None, Permissions::allow_all(), + mock_state.flags.unstable, false, false, );