From 4ef4ca00f6795ba779ce367726073c60cd14575f Mon Sep 17 00:00:00 2001 From: Iban Eguia Moraza Date: Wed, 2 Sep 2020 16:04:47 +0200 Subject: [PATCH 1/6] Started with the test262 implementation --- .github/workflows/test262.yml | 69 ++++ .gitignore | 5 +- .gitmodules | 3 + CONTRIBUTING.md | 19 ++ Cargo.lock | 62 +++- Cargo.toml | 1 + boa/src/builtins/array/mod.rs | 5 +- boa/src/builtins/math/tests.rs | 2 +- boa/src/exec/tests.rs | 20 +- boa/src/lib.rs | 39 ++- boa/src/syntax/ast/position.rs | 2 + boa/src/syntax/lexer/tests.rs | 74 ++--- .../parser/cursor/buffered_lexer/tests.rs | 28 +- boa/src/syntax/parser/tests.rs | 33 +- boa_cli/Cargo.toml | 2 +- boa_wasm/src/lib.rs | 22 +- test262 | 1 + test_ignore.txt | 28 ++ tester/Cargo.toml | 19 ++ tester/src/exec.rs | 194 +++++++++++ tester/src/main.rs | 303 ++++++++++++++++++ tester/src/read.rs | 257 +++++++++++++++ tester/src/results.rs | 97 ++++++ 23 files changed, 1169 insertions(+), 116 deletions(-) create mode 100644 .github/workflows/test262.yml create mode 100644 .gitmodules create mode 160000 test262 create mode 100644 test_ignore.txt create mode 100644 tester/Cargo.toml create mode 100644 tester/src/exec.rs create mode 100644 tester/src/main.rs create mode 100644 tester/src/read.rs create mode 100644 tester/src/results.rs diff --git a/.github/workflows/test262.yml b/.github/workflows/test262.yml new file mode 100644 index 00000000000..ef1eb3e6198 --- /dev/null +++ b/.github/workflows/test262.yml @@ -0,0 +1,69 @@ +name: EcmaScript official test suite (test262) +on: + push: + branches: + - master + tags: + - v* + pull_request: + branches: + - master + +jobs: + run_test262: + name: Run the test262 test suite + runs-on: ubuntu-latest + steps: + - name: Checkout the repository + uses: actions/checkout@v2 + with: + submodules: true + - name: Install the Rust toolchain + uses: actions-rs/toolchain@v1 + with: + toolchain: stable + override: true + profile: minimal + - name: Cache cargo registry + uses: actions/cache@v1 + with: + path: ~/.cargo/registry + key: ${{ runner.os }}-cargo-registry-${{ hashFiles('**/Cargo.lock') }} + - name: Cache cargo index + uses: actions/cache@v1 + with: + path: ~/.cargo/git + key: ${{ runner.os }}-cargo-index-${{ hashFiles('**/Cargo.lock') }} + - name: Cache cargo build + uses: actions/cache@v1 + with: + path: target + key: ${{ runner.os }}-cargo-build-target-${{ hashFiles('**/Cargo.lock') }} + + # Run the test suite and upload the results + - name: Checkout GitHub pages + if: github.event_name == 'push' + uses: actions/checkout@v2 + with: + ref: gh-pages + path: gh-pages + - run: mkdir -p gh-pages/test262 + + - name: Run the test262 test suite + run: cargo run --release --bin tester -- -o gh-pages/test262 + + - name: Commit files + if: github.event_name == 'push' + run: | + cd gh-pages + git config --local user.email "action@github.com" + git config --local user.name "GitHub Action" + git pull + git commit -m "Add new test262 results" -a + cd .. 
+ - name: Upload results + if: github.event_name == 'push' + uses: ad-m/github-push-action@v0.6.0 + with: + directory: gh-pages + github_token: ${{ secrets.GITHUB_TOKEN }} diff --git a/.gitignore b/.gitignore index 57085344047..5f2745f75f4 100644 --- a/.gitignore +++ b/.gitignore @@ -23,4 +23,7 @@ tests/js/test.js *.string_data *.string_index *.events -chrome_profiler.json +chrome_profiler.json + +# Logs +*.log diff --git a/.gitmodules b/.gitmodules new file mode 100644 index 00000000000..c41542feb22 --- /dev/null +++ b/.gitmodules @@ -0,0 +1,3 @@ +[submodule "test262"] + path = test262 + url = https://github.com/tc39/test262.git diff --git a/CONTRIBUTING.md b/CONTRIBUTING.md index 6049b16b731..0a33f259d89 100644 --- a/CONTRIBUTING.md +++ b/CONTRIBUTING.md @@ -65,6 +65,25 @@ There are some pre-defined tasks in [tasks.json](.vscode/tasks.json) If you don't want to install everything on your machine, you can use the Dockerfile. Start VSCode in container mode (you may need the docker container plugin) and use the Dockerfile. +## Testing + +Boa provides its own test suite, and can also run the official ECMAScript test suite. To run the Boa test +suite, you can just run the normal `cargo test`, and to run the full ECMAScript test suite, you can run it +with this command: + +``` +cargo run --release --bin tester -- -v 2> error.log +``` + +Note that this requires the `test262` submodule to be checked out, so you will need to run the following first: + +``` +git submodule init && git submodule update +``` + +This will run the test suite in verbose mode (you can remove the `-- -v` part to run it in non-verbose mode), +and output nice colorings in the terminal. It will also output any panic information into the `error.log` file. + ## Communication We have a Discord server, feel free to ask questions here: diff --git a/Cargo.lock b/Cargo.lock index d82b5beccdc..3bdfe7271c1 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -339,6 +339,12 @@ dependencies = [ "winapi", ] +[[package]] +name = "dtoa" +version = "0.4.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "134951f4028bdadb9b84baf4232681efbf277da25144b9b0ad65df75946c422b" + [[package]] name = "either" version = "1.6.0" @@ -360,6 +366,15 @@ version = "1.2.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "2022715d62ab30faffd124d40b76f4134a550a87792276512b18d63272333394" +[[package]] +name = "fxhash" +version = "0.2.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "c31b6d751ae2c7f11320402d34e41349dd1016f8d5d45e48c4312bc8625af50c" +dependencies = [ + "byteorder", +] + [[package]] name = "gc" version = "0.3.6" @@ -421,9 +436,9 @@ dependencies = [ [[package]] name = "indexmap" -version = "1.5.1" +version = "1.5.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "86b45e59b16c76b11bf9738fd5d38879d3bd28ad292d7b313608becb17ae2df9" +checksum = "4e47a3566dd4fd4eec714ae6ceabdee0caec795be835c223d92c2d40f1e8cf1c" dependencies = [ "autocfg", "hashbrown", @@ -486,6 +501,12 @@ version = "0.2.76" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "755456fae044e6fa1ebbbd1b3e902ae19e73097ed4ed87bb79934a867c007bc3" +[[package]] +name = "linked-hash-map" +version = "0.5.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "8dd5a6d5999d9907cda8ed67bbd137d3af8085216c2ac62de5be860bd41f304a" + [[package]] name = "lock_api" version = "0.3.4" @@ -938,6 +959,18 @@ dependencies = [ "serde", ] +[[package]] +name 
= "serde_yaml" +version = "0.8.13" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "ae3e2dd40a7cdc18ca80db804b7f461a39bb721160a85c9a1fa30134bf3c02a5" +dependencies = [ + "dtoa", + "linked-hash-map", + "serde", + "yaml-rust", +] + [[package]] name = "smallvec" version = "0.6.13" @@ -1000,6 +1033,22 @@ dependencies = [ "unicode-xid", ] +[[package]] +name = "tester" +version = "0.1.0" +dependencies = [ + "Boa", + "bitflags", + "colored", + "fxhash", + "once_cell", + "regex", + "serde", + "serde_json", + "serde_yaml", + "structopt", +] + [[package]] name = "textwrap" version = "0.11.0" @@ -1198,3 +1247,12 @@ name = "winapi-x86_64-pc-windows-gnu" version = "0.4.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "712e227841d057c1ee1cd2fb22fa7e5a5461ae8e48fa2ca79ec42cfc1931183f" + +[[package]] +name = "yaml-rust" +version = "0.4.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "39f0c922f1a334134dc2f7a8b67dc5d25f0735263feec974345ff706bcf20b0d" +dependencies = [ + "linked-hash-map", +] diff --git a/Cargo.toml b/Cargo.toml index 5c1ef90c6f2..a3b2b1f608b 100644 --- a/Cargo.toml +++ b/Cargo.toml @@ -3,6 +3,7 @@ members = [ "boa", "boa_cli", "boa_wasm", + "tester", ] # The release profile, used for `cargo build --release`. diff --git a/boa/src/builtins/array/mod.rs b/boa/src/builtins/array/mod.rs index 58596a94593..7fd7cbc47d2 100644 --- a/boa/src/builtins/array/mod.rs +++ b/boa/src/builtins/array/mod.rs @@ -655,7 +655,10 @@ impl Array { } let search_element = args[0].clone(); - let len = this.get_field("length").as_number().unwrap() as i32; + let len = this + .get_field("length") + .as_number() + .expect("length was not a number") as i32; let mut idx = match args.get(1) { Some(from_idx_ptr) => { diff --git a/boa/src/builtins/math/tests.rs b/boa/src/builtins/math/tests.rs index 506528c6642..492e604c4aa 100644 --- a/boa/src/builtins/math/tests.rs +++ b/boa/src/builtins/math/tests.rs @@ -721,7 +721,7 @@ fn tan() { assert!(float_cmp::approx_eq!( f64, a.to_number(&mut engine).unwrap(), - f64::from(1.964_759_657_248_652_5) + 1.964_759_657_248_652_5 )); } diff --git a/boa/src/exec/tests.rs b/boa/src/exec/tests.rs index d1b3feada82..0a3620d7078 100644 --- a/boa/src/exec/tests.rs +++ b/boa/src/exec/tests.rs @@ -1303,7 +1303,7 @@ fn assignment_to_non_assignable() { for case in test_cases.iter() { let string = forward(&mut engine, case); - assert!(string.starts_with("Syntax Error: ")); + assert!(string.starts_with("Uncaught \"SyntaxError\": ")); assert!(string.contains("1:3")); } } @@ -1318,9 +1318,9 @@ fn multicharacter_assignment_to_non_assignable() { let test_cases = ["3 **= 5", "3 <<= 5", "3 >>= 5"]; for case in test_cases.iter() { - let string = forward(&mut engine, case); + let string = dbg!(forward(&mut engine, case)); - assert!(string.starts_with("Syntax Error: ")); + assert!(string.starts_with("Uncaught \"SyntaxError\": ")); assert!(string.contains("1:3")); } } @@ -1335,9 +1335,9 @@ fn multicharacter_bitwise_assignment_to_non_assignable() { let test_cases = ["3 >>>= 5", "3 &&= 5", "3 ||= 5", "3 ??= 5"]; for case in test_cases.iter() { - let string = forward(&mut engine, case); + let string = dbg!(forward(&mut engine, case)); - assert!(string.starts_with("Syntax Error: ")); + assert!(string.starts_with("Uncaught \"SyntaxError\": ")); assert!(string.contains("1:3")); } } @@ -1347,10 +1347,10 @@ fn assign_to_array_decl() { let realm = Realm::create(); let mut engine = Interpreter::new(realm); - assert!(forward(&mut 
engine, "[1] = [2]").starts_with("Syntax Error: ")); - assert!(forward(&mut engine, "[3, 5] = [7, 8]").starts_with("Syntax Error: ")); - assert!(forward(&mut engine, "[6, 8] = [2]").starts_with("Syntax Error: ")); - assert!(forward(&mut engine, "[6] = [2, 9]").starts_with("Syntax Error: ")); + assert!(forward(&mut engine, "[1] = [2]").starts_with("Uncaught \"SyntaxError\": ")); + assert!(forward(&mut engine, "[3, 5] = [7, 8]").starts_with("Uncaught \"SyntaxError\": ")); + assert!(forward(&mut engine, "[6, 8] = [2]").starts_with("Uncaught \"SyntaxError\": ")); + assert!(forward(&mut engine, "[6] = [2, 9]").starts_with("Uncaught \"SyntaxError\": ")); } #[test] @@ -1359,7 +1359,7 @@ fn assign_to_object_decl() { let mut engine = Interpreter::new(realm); const ERR_MSG: &str = - "expected token \';\', got \':\' in expression statement at line 1, col 3"; + "Uncaught \"SyntaxError\": \"expected token \';\', got \':\' in expression statement at line 1, col 3\""; assert_eq!(forward(&mut engine, "{a: 3} = {a: 5};"), ERR_MSG); } diff --git a/boa/src/lib.rs b/boa/src/lib.rs index 209d88725d2..246a365b9fb 100644 --- a/boa/src/lib.rs +++ b/boa/src/lib.rs @@ -59,22 +59,33 @@ pub use gc::{custom_trace, unsafe_empty_trace, Finalize, Trace}; #[must_use] pub type Result = StdResult; -fn parser_expr(src: &str) -> StdResult { - Parser::new(src.as_bytes()) - .parse_all() - .map_err(|e| e.to_string()) +/// Parses the given source code. +/// +/// It will return either the statement list AST node for the code, or a parsing error if something +/// goes wrong. +#[inline] +pub fn parse(src: &str) -> StdResult { + Parser::new(src.as_bytes()).parse_all() } /// Execute the code using an existing Interpreter /// The str is consumed and the state of the Interpreter is changed pub fn forward(engine: &mut Interpreter, src: &str) -> String { // Setup executor - let expr = match parser_expr(src) { + let expr = match parse(src) { Ok(res) => res, - Err(e) => return e, + Err(e) => { + return format!( + "Uncaught {}", + engine + .throw_syntax_error(e.to_string()) + .expect_err("interpreter.throw_syntax_error() did not return an error") + .display() + ); + } }; expr.run(engine).map_or_else( - |e| format!("Error: {}", e.display()), + |e| format!("Uncaught {}", e.display()), |v| v.display().to_string(), ) } @@ -87,13 +98,13 @@ pub fn forward(engine: &mut Interpreter, src: &str) -> String { pub fn forward_val(engine: &mut Interpreter, src: &str) -> Result { let main_timer = BoaProfiler::global().start_event("Main", "Main"); // Setup executor - let result = match parser_expr(src) { - Ok(expr) => expr.run(engine), - Err(e) => { - eprintln!("{}", e); - panic!(); - } - }; + let result = parse(src) + .map_err(|e| { + engine + .throw_syntax_error(e.to_string()) + .expect_err("interpreter.throw_syntax_error() did not return an error") + }) + .and_then(|expr| expr.run(engine)); // The main_timer needs to be dropped before the BoaProfiler is. drop(main_timer); diff --git a/boa/src/syntax/ast/position.rs b/boa/src/syntax/ast/position.rs index 5f3e832021a..1da3de12b2e 100644 --- a/boa/src/syntax/ast/position.rs +++ b/boa/src/syntax/ast/position.rs @@ -26,6 +26,7 @@ pub struct Position { impl Position { /// Creates a new `Position`. #[inline] + #[track_caller] pub fn new(line_number: u32, column_number: u32) -> Self { Self { line_number: NonZeroU32::new(line_number).expect("line number cannot be 0"), @@ -65,6 +66,7 @@ pub struct Span { impl Span { /// Creates a new `Span`. 
#[inline] + #[track_caller] pub fn new(start: Position, end: Position) -> Self { assert!(start <= end, "a span cannot start after its end"); diff --git a/boa/src/syntax/lexer/tests.rs b/boa/src/syntax/lexer/tests.rs index 9a7f5e9b6bb..cb01a438702 100644 --- a/boa/src/syntax/lexer/tests.rs +++ b/boa/src/syntax/lexer/tests.rs @@ -312,7 +312,7 @@ fn check_line_numbers() { #[test] fn check_decrement_advances_lexer_2_places() { // Here we want an example of decrementing an integer - let mut lexer = Lexer::new(&b"let a = b--;"[0..]); + let mut lexer = Lexer::new(&b"let a = b--;"[..]); for _ in 0..4 { lexer.next().unwrap(); @@ -333,7 +333,7 @@ fn check_decrement_advances_lexer_2_places() { #[test] fn single_int() { - let mut lexer = Lexer::new(&b"52"[0..]); + let mut lexer = Lexer::new(&b"52"[..]); let expected = [TokenKind::numeric_literal(52)]; @@ -375,7 +375,7 @@ fn numbers() { #[test] fn big_exp_numbers() { - let mut lexer = Lexer::new(&b"1.0e25 1.0e36 9.0e50"[0..]); + let mut lexer = Lexer::new(&b"1.0e25 1.0e36 9.0e50"[..]); let expected = [ TokenKind::numeric_literal(10000000000000000000000000.0), @@ -389,7 +389,7 @@ fn big_exp_numbers() { #[test] #[ignore] fn big_literal_numbers() { - let mut lexer = Lexer::new(&b"10000000000000000000000000"[0..]); + let mut lexer = Lexer::new(&b"10000000000000000000000000"[..]); let expected = [TokenKind::numeric_literal(10000000000000000000000000.0)]; @@ -398,7 +398,7 @@ fn big_literal_numbers() { #[test] fn implicit_octal_edge_case() { - let mut lexer = Lexer::new(&b"044.5 094.5"[0..]); + let mut lexer = Lexer::new(&b"044.5 094.5"[..]); let expected = [ TokenKind::numeric_literal(36), @@ -412,7 +412,7 @@ fn implicit_octal_edge_case() { #[test] fn hexadecimal_edge_case() { - let mut lexer = Lexer::new(&b"0xffff.ff 0xffffff"[0..]); + let mut lexer = Lexer::new(&b"0xffff.ff 0xffffff"[..]); let expected = [ TokenKind::numeric_literal(0xffff), @@ -426,7 +426,7 @@ fn hexadecimal_edge_case() { #[test] fn single_number_without_semicolon() { - let mut lexer = Lexer::new(&b"1"[0..]); + let mut lexer = Lexer::new(&b"1"[..]); if let Some(x) = lexer.next().unwrap() { assert_eq!(x.kind(), &TokenKind::numeric_literal(Numeric::Integer(1))); } else { @@ -436,7 +436,7 @@ fn single_number_without_semicolon() { #[test] fn number_followed_by_dot() { - let mut lexer = Lexer::new(&b"1.."[0..]); + let mut lexer = Lexer::new(&b"1.."[..]); let expected = [ TokenKind::numeric_literal(1), @@ -448,7 +448,7 @@ fn number_followed_by_dot() { #[test] fn regex_literal() { - let mut lexer = Lexer::new(&b"/(?:)/"[0..]); + let mut lexer = Lexer::new(&b"/(?:)/"[..]); let expected = [TokenKind::regular_expression_literal( "(?:)", @@ -460,7 +460,7 @@ fn regex_literal() { #[test] fn regex_literal_flags() { - let mut lexer = Lexer::new(&br"/\/[^\/]*\/*/gmi"[0..]); + let mut lexer = Lexer::new(&br"/\/[^\/]*\/*/gmi"[..]); let mut flags = RegExpFlags::default(); flags.insert(RegExpFlags::GLOBAL); @@ -477,7 +477,7 @@ fn regex_literal_flags() { #[test] fn addition_no_spaces() { - let mut lexer = Lexer::new(&b"1+1"[0..]); + let mut lexer = Lexer::new(&b"1+1"[..]); let expected = [ TokenKind::numeric_literal(1), @@ -490,7 +490,7 @@ fn addition_no_spaces() { #[test] fn addition_no_spaces_left_side() { - let mut lexer = Lexer::new(&b"1+ 1"[0..]); + let mut lexer = Lexer::new(&b"1+ 1"[..]); let expected = [ TokenKind::numeric_literal(1), @@ -503,7 +503,7 @@ fn addition_no_spaces_left_side() { #[test] fn addition_no_spaces_right_side() { - let mut lexer = Lexer::new(&b"1 +1"[0..]); + let mut lexer = 
Lexer::new(&b"1 +1"[..]); let expected = [ TokenKind::numeric_literal(1), @@ -516,7 +516,7 @@ fn addition_no_spaces_right_side() { #[test] fn addition_no_spaces_e_number_left_side() { - let mut lexer = Lexer::new(&b"1e2+ 1"[0..]); + let mut lexer = Lexer::new(&b"1e2+ 1"[..]); let expected = [ TokenKind::numeric_literal(100), @@ -529,7 +529,7 @@ fn addition_no_spaces_e_number_left_side() { #[test] fn addition_no_spaces_e_number_right_side() { - let mut lexer = Lexer::new(&b"1 +1e3"[0..]); + let mut lexer = Lexer::new(&b"1 +1e3"[..]); let expected = [ TokenKind::numeric_literal(1), @@ -542,7 +542,7 @@ fn addition_no_spaces_e_number_right_side() { #[test] fn addition_no_spaces_e_number() { - let mut lexer = Lexer::new(&b"1e3+1e11"[0..]); + let mut lexer = Lexer::new(&b"1e3+1e11"[..]); let expected = [ TokenKind::numeric_literal(1000), @@ -555,7 +555,7 @@ fn addition_no_spaces_e_number() { #[test] fn take_while_pred_simple() { - let mut cur = Cursor::new(&b"abcdefghijk"[0..]); + let mut cur = Cursor::new(&b"abcdefghijk"[..]); let mut buf: String = String::new(); @@ -567,7 +567,7 @@ fn take_while_pred_simple() { #[test] fn take_while_pred_immediate_stop() { - let mut cur = Cursor::new(&b"abcdefghijk"[0..]); + let mut cur = Cursor::new(&b"abcdefghijk"[..]); let mut buf: String = String::new(); @@ -578,7 +578,7 @@ fn take_while_pred_immediate_stop() { #[test] fn take_while_pred_entire_str() { - let mut cur = Cursor::new(&b"abcdefghijk"[0..]); + let mut cur = Cursor::new(&b"abcdefghijk"[..]); let mut buf: String = String::new(); @@ -594,7 +594,7 @@ fn illegal_following_numeric_literal() { // be immediately followed by an IdentifierStart or DecimalDigit. // Decimal Digit - let mut lexer = Lexer::new(&b"11.6n3"[0..]); + let mut lexer = Lexer::new(&b"11.6n3"[..]); let err = lexer .next() .expect_err("DecimalDigit following NumericLiteral not rejected as expected"); @@ -605,28 +605,30 @@ fn illegal_following_numeric_literal() { } // Identifier Start - let mut lexer = Lexer::new(&b"17.4$"[0..]); - match lexer.next() { - Err(Error::Syntax(_, pos)) => assert_eq!(pos, Position::new(1, 5)), - _ => assert!( - false, - "IdentifierStart '$' following NumericLiteral not rejected as expected" - ), + let mut lexer = Lexer::new(&b"17.4$"[..]); + if let Error::Syntax(_, pos) = lexer + .next() + .expect_err("IdentifierStart '$' following NumericLiteral not rejected as expected") + { + assert_eq!(pos, Position::new(1, 5)); + } else { + panic!("invalid error type"); } - let mut lexer = Lexer::new(&b"17.4_"[0..]); - match lexer.next() { - Err(Error::Syntax(_, pos)) => assert_eq!(pos, Position::new(1, 5)), - _ => assert!( - false, - "IdentifierStart '_' following NumericLiteral not rejected as expected" - ), + let mut lexer = Lexer::new(&b"17.4_"[..]); + if let Error::Syntax(_, pos) = lexer + .next() + .expect_err("IdentifierStart '_' following NumericLiteral not rejected as expected") + { + assert_eq!(pos, Position::new(1, 5)); + } else { + panic!("invalid error type"); } } #[test] fn codepoint_with_no_braces() { - let mut lexer = Lexer::new(r#""test\uD83Dtest""#.as_bytes()); + let mut lexer = Lexer::new(&br#""test\uD83Dtest""#[..]); assert!(lexer.next().is_ok()); } @@ -635,7 +637,7 @@ fn codepoint_with_no_braces() { fn illegal_code_point_following_numeric_literal() { // Checks as per https://tc39.es/ecma262/#sec-literals-numeric-literals that a NumericLiteral cannot // be immediately followed by an IdentifierStart where the IdentifierStart - let mut lexer = Lexer::new(r#"17.4\u{{2764}}"#.as_bytes()); + let mut lexer 
= Lexer::new(&br#"17.4\u{{2764}}"#[..]); assert!( lexer.next().is_err(), "IdentifierStart \\u{{2764}} following NumericLiteral not rejected as expected" diff --git a/boa/src/syntax/parser/cursor/buffered_lexer/tests.rs b/boa/src/syntax/parser/cursor/buffered_lexer/tests.rs index 7a1ad1a883f..5dc16736c44 100644 --- a/boa/src/syntax/parser/cursor/buffered_lexer/tests.rs +++ b/boa/src/syntax/parser/cursor/buffered_lexer/tests.rs @@ -3,9 +3,7 @@ use crate::syntax::lexer::{Token, TokenKind}; #[test] fn peek_skip_accending() { - let buf: &[u8] = "a b c d e f g h i".as_bytes(); - - let mut cur = BufferedLexer::from(buf); + let mut cur = BufferedLexer::from(&b"a b c d e f g h i"[..]); assert_eq!( *cur.peek(0, false) @@ -53,9 +51,7 @@ fn peek_skip_accending() { #[test] fn peek_skip_next() { - let buf: &[u8] = "a b c d e f g h i".as_bytes(); - - let mut cur = BufferedLexer::from(buf); + let mut cur = BufferedLexer::from(&b"a b c d e f g h i"[..]); assert_eq!( *cur.peek(0, false) @@ -138,9 +134,7 @@ fn peek_skip_next() { #[test] fn peek_skip_next_alternating() { - let buf: &[u8] = "a b c d e f g h i".as_bytes(); - - let mut cur = BufferedLexer::from(buf); + let mut cur = BufferedLexer::from(&b"a b c d e f g h i"[..]); assert_eq!( *cur.peek(0, false) @@ -195,9 +189,7 @@ fn peek_skip_next_alternating() { #[test] fn peek_next_till_end() { - let buf: &[u8] = "a b c d e f g h i".as_bytes(); - - let mut cur = BufferedLexer::from(buf); + let mut cur = BufferedLexer::from(&b"a b c d e f g h i"[..]); loop { let peek = cur.peek(0, false).unwrap().cloned(); @@ -213,18 +205,18 @@ fn peek_next_till_end() { #[test] fn peek_skip_next_till_end() { - let mut cur = BufferedLexer::from("a b c d e f g h i".as_bytes()); + let mut cur = BufferedLexer::from(&b"a b c d e f g h i"[..]); let mut peeked: [Option; super::MAX_PEEK_SKIP + 1] = [None::, None::, None::]; loop { - for i in 0..super::MAX_PEEK_SKIP { - peeked[i] = cur.peek(i, false).unwrap().cloned(); + for (i, peek) in peeked.iter_mut().enumerate() { + *peek = cur.peek(i, false).unwrap().cloned(); } - for i in 0..super::MAX_PEEK_SKIP { - assert_eq!(cur.next(false).unwrap(), peeked[i]); + for peek in &peeked { + assert_eq!(&cur.next(false).unwrap(), peek); } if peeked[super::MAX_PEEK_SKIP - 1].is_none() { @@ -235,7 +227,7 @@ fn peek_skip_next_till_end() { #[test] fn skip_peeked_terminators() { - let mut cur = BufferedLexer::from("A \n B".as_bytes()); + let mut cur = BufferedLexer::from(&b"A \n B"[..]); assert_eq!( *cur.peek(0, false) .unwrap() diff --git a/boa/src/syntax/parser/tests.rs b/boa/src/syntax/parser/tests.rs index 0c5037a0a80..06adb296caf 100644 --- a/boa/src/syntax/parser/tests.rs +++ b/boa/src/syntax/parser/tests.rs @@ -144,14 +144,12 @@ fn comment_semi_colon_insertion() { LetDeclList::from(vec![LetDecl::new::<&str, Option>( "a", Some(Const::Int(10).into()), - ) - .into()]) + )]) .into(), LetDeclList::from(vec![LetDecl::new::<&str, Option>( "b", Some(Const::Int(20).into()), - ) - .into()]) + )]) .into(), ], ); @@ -172,14 +170,12 @@ fn multiline_comment_semi_colon_insertion() { LetDeclList::from(vec![LetDecl::new::<&str, Option>( "a", Some(Const::Int(10).into()), - ) - .into()]) + )]) .into(), LetDeclList::from(vec![LetDecl::new::<&str, Option>( "b", Some(Const::Int(20).into()), - ) - .into()]) + )]) .into(), ], ); @@ -197,14 +193,12 @@ fn multiline_comment_no_lineterminator() { LetDeclList::from(vec![LetDecl::new::<&str, Option>( "a", Some(Const::Int(10).into()), - ) - .into()]) + )]) .into(), LetDeclList::from(vec![LetDecl::new::<&str, Option>( "b", 
Some(Const::Int(20).into()),
-            )
-            .into()])
+            )])
             .into(),
         ],
     );
@@ -225,8 +219,7 @@ fn assignment_line_terminator() {
             LetDeclList::from(vec![LetDecl::new::<&str, Option>(
                 "a",
                 Some(Const::Int(3).into()),
-            )
-            .into()])
+            )])
             .into(),
             Assign::new(Identifier::from("a"), Const::from(5)).into(),
         ],
@@ -239,18 +232,9 @@ fn assignment_multiline_terminator() {
     let a = 3;
-
-
-
     a =
-
-
-
-
-
     5;
     "#;
@@ -260,8 +244,7 @@ fn assignment_multiline_terminator() {
             LetDeclList::from(vec![LetDecl::new::<&str, Option>(
                 "a",
                 Some(Const::Int(3).into()),
-            )
-            .into()])
+            )])
             .into(),
             Assign::new(Identifier::from("a"), Const::from(5)).into(),
         ],
diff --git a/boa_cli/Cargo.toml b/boa_cli/Cargo.toml
index b830acb2005..c53045f0296 100644
--- a/boa_cli/Cargo.toml
+++ b/boa_cli/Cargo.toml
@@ -14,7 +14,7 @@ edition = "2018"
 Boa = { path = "../boa", features = ["serde"] }
 rustyline = "6.2.0"
 rustyline-derive = "0.3.1"
-structopt = "0.3.16"
+structopt = "0.3.17"
 serde_json = "1.0.57"
 colored = "2.0.0"
 regex = "1.3.9"
diff --git a/boa_wasm/src/lib.rs b/boa_wasm/src/lib.rs
index 158892831dd..f8a1e689fbd 100644
--- a/boa_wasm/src/lib.rs
+++ b/boa_wasm/src/lib.rs
@@ -1,18 +1,26 @@
-use boa::{Executable, Interpreter, Parser, Realm};
+use boa::{parse, Executable, Interpreter, Realm};
 use wasm_bindgen::prelude::*;

 #[wasm_bindgen]
 pub fn evaluate(src: &str) -> Result {
-    let expr = Parser::new(src.as_bytes())
-        .parse_all()
-        .map_err(|e| JsValue::from(format!("Parsing Error: {}", e)))?;
-
     // Setup executor
     let realm = Realm::create();
     let mut engine = Interpreter::new(realm);

-    // Setup executor
+    let expr = match parse(src) {
+        Ok(res) => res,
+        Err(e) => {
+            return Err(format!(
+                "Uncaught {}",
+                engine
+                    .throw_syntax_error(e.to_string())
+                    .expect_err("interpreter.throw_syntax_error() did not return an error")
+                    .display()
+            )
+            .into());
+        }
+    };
     expr.run(&mut engine)
-        .map_err(|e| JsValue::from(format!("Error: {}", e.display())))
+        .map_err(|e| JsValue::from(format!("Uncaught {}", e.display())))
         .map(|v| v.display().to_string())
 }
diff --git a/test262 b/test262
new file mode 160000
index 00000000000..896994413ca
--- /dev/null
+++ b/test262
@@ -0,0 +1 @@
+Subproject commit 896994413cad849f470cec7757c4bb7d1b4ffc12
diff --git a/test_ignore.txt b/test_ignore.txt
new file mode 100644
index 00000000000..eee9dc7bb5b
--- /dev/null
+++ b/test_ignore.txt
@@ -0,0 +1,28 @@
+// This does not break the tester but it does iterate from 0 to u32::MAX,
+// because of an incorrect implementation of `Array.prototype.indexOf`.
+// TODO: Fix it to iterate on the elements in the array **in insertion order**, not from
+// 0 to u32::MAX until it reaches the element. 
+15.4.4.14-5-13 + +// New errors: +// Stack overflows: +tco-non-eval-function +tco-non-eval-global +value-tojson-array-circular +value-array-circular +value-tojson-object-circular +value-object-circular + +// This does not stack overflow, but freezes the computer: +arg-length-exceeding-integer-limit + +// These seem to run forever: +15.4.4.22-9-b-9 +15.4.4.22-7-11 +15.4.4.22-9-5 +15.4.4.22-8-b-iii-1-30 +15.4.4.22-10-3 +15.4.4.19-8-c-ii-1 +fill-string-empty +S15.4.4.10_A3_T2 +S15.4.4.10_A3_T1 \ No newline at end of file diff --git a/tester/Cargo.toml b/tester/Cargo.toml new file mode 100644 index 00000000000..b2052e79fb8 --- /dev/null +++ b/tester/Cargo.toml @@ -0,0 +1,19 @@ +[package] +name = "tester" +version = "0.1.0" +authors = ["Iban Eguia Moraza "] +edition = "2018" + +# See more keys and their definitions at https://doc.rust-lang.org/cargo/reference/manifest.html + +[dependencies] +Boa = { path = "../boa" } +structopt = "0.3.17" +serde = { version = "1.0.115", features = ["derive"] } +serde_yaml = "0.8.13" +serde_json = "1.0.57" +bitflags = "1.2.1" +regex = "1.3.9" +once_cell = "1.4.1" +colored = "2.0.0" +fxhash = "0.2.1" diff --git a/tester/src/exec.rs b/tester/src/exec.rs new file mode 100644 index 00000000000..7eb1559cb44 --- /dev/null +++ b/tester/src/exec.rs @@ -0,0 +1,194 @@ +//! Execution module for the test runner. + +use super::{Harness, Outcome, Phase, SuiteResult, Test, TestFlags, TestResult, TestSuite, CLI}; +use boa::{forward_val, parse, Interpreter, Realm}; +use colored::Colorize; +use fxhash::FxHashSet; +use once_cell::sync::Lazy; +use std::{fs, panic, path::Path}; + +/// List of ignored tests. +static IGNORED: Lazy>> = Lazy::new(|| { + let path = Path::new("test_ignore.txt"); + if path.exists() { + let filtered = fs::read_to_string(path).expect("could not read test filters"); + filtered + .lines() + .filter(|line| !line.is_empty() && !line.starts_with("//")) + .map(|line| line.to_owned().into_boxed_str()) + .collect::>() + } else { + FxHashSet::default() + } +}); + +impl TestSuite { + /// Runs the test suite. + pub(crate) fn run(&self, harness: &Harness) -> SuiteResult { + if CLI.verbose() { + println!("Suite {}:", self.name); + } + + // TODO: in parallel + let suites: Vec<_> = self.suites.iter().map(|suite| suite.run(harness)).collect(); + + // TODO: in parallel + let tests: Vec<_> = self.tests.iter().map(|test| test.run(harness)).collect(); + + if CLI.verbose() { + println!(); + } + + // Count passed tests + let mut passed = 0; + let mut ignored = 0; + for test in &tests { + if let Some(true) = test.passed { + passed += 1; + } else if test.passed.is_none() { + ignored += 1; + } + } + + // Count total tests + let mut total = tests.len(); + for suite in &suites { + total += suite.total; + passed += suite.passed; + ignored += suite.ignored; + } + + if CLI.verbose() { + println!( + "Results: total: {}, passed: {}, ignored: {}, conformance: {:.2}%", + total, + passed, + ignored, + (passed as f64 / total as f64) * 100.0 + ); + } + + SuiteResult { + name: self.name.clone(), + total, + passed, + ignored, + suites, + tests: tests.into_boxed_slice(), + } + } +} + +impl Test { + /// Runs the test. 
+ pub(crate) fn run(&self, harness: &Harness) -> TestResult { + // println!("Starting `{}`", self.name); + + let passed = if !self.flags.intersects(TestFlags::ASYNC | TestFlags::MODULE) + && !IGNORED.contains(&self.name) + { + let res = panic::catch_unwind(|| { + match self.expected_outcome { + Outcome::Positive => { + let mut passed = true; + + if self.flags.contains(TestFlags::RAW) { + let mut engine = self.set_up_env(&harness, false); + let res = forward_val(&mut engine, &self.content); + + passed = res.is_ok() + } else { + if self.flags.contains(TestFlags::STRICT) { + let mut engine = self.set_up_env(&harness, true); + let res = forward_val(&mut engine, &self.content); + + passed = res.is_ok() + } + + if passed && self.flags.contains(TestFlags::NO_STRICT) { + let mut engine = self.set_up_env(&harness, false); + let res = forward_val(&mut engine, &self.content); + + passed = res.is_ok() + } + } + + passed + } + Outcome::Negative { + phase: Phase::Parse, + ref error_type, + } => { + assert_eq!( + error_type.as_ref(), + "SyntaxError", + "non-SyntaxError parsing error found in {}", + self.name + ); + + parse(&self.content).is_err() + } + Outcome::Negative { + phase: _, + error_type: _, + } => { + // TODO: check the phase + false + } + } + }); + + let passed = res.unwrap_or_else(|_| { + eprintln!("last panic was on test \"{}\"", self.name); + false + }); + + print!("{}", if passed { ".".green() } else { ".".red() }); + + Some(passed) + } else { + // Ignoring async tests for now. + // TODO: implement async and add `harness/doneprintHandle.js` to the includes. + print!("{}", ".".yellow()); + None + }; + + TestResult { + name: self.name.clone(), + passed, + } + } + + /// Sets the environment up to run the test. + fn set_up_env(&self, harness: &Harness, strict: bool) -> Interpreter { + // Create new Realm + // TODO: in parallel. + let realm = Realm::create(); + let mut engine = Interpreter::new(realm); + + // TODO: set up the environment. + + if strict { + forward_val(&mut engine, r#""use strict";"#).expect("could not set strict mode"); + } + + forward_val(&mut engine, &harness.assert).expect("could not run assert.js"); + forward_val(&mut engine, &harness.sta).expect("could not run sta.js"); + + self.includes.iter().for_each(|include| { + let res = forward_val( + &mut engine, + &harness + .includes + .get(include) + .expect("could not find include file"), + ); + if let Err(e) = res { + eprintln!("could not run the {} include file.", include); + panic!("Uncaught {}", e.display()); + } + }); + + engine + } +} diff --git a/tester/src/main.rs b/tester/src/main.rs new file mode 100644 index 00000000000..3fb6a62cc72 --- /dev/null +++ b/tester/src/main.rs @@ -0,0 +1,303 @@ +//! Test262 test runner +//! +//! This crate will run the full ECMAScript test suite (Test262) and report compliance of the +//! `boa` engine. 
+#![doc( + html_logo_url = "https://raw.githubusercontent.com/jasonwilliams/boa/master/assets/logo.svg", + html_favicon_url = "https://raw.githubusercontent.com/jasonwilliams/boa/master/assets/logo.svg" +)] +#![deny( + unused_qualifications, + clippy::all, + unused_qualifications, + unused_import_braces, + unused_lifetimes, + unreachable_pub, + trivial_numeric_casts, + // rustdoc, + missing_debug_implementations, + missing_copy_implementations, + deprecated_in_future, + meta_variable_misuse, + non_ascii_idents, + rust_2018_compatibility, + rust_2018_idioms, + future_incompatible, + nonstandard_style, +)] +#![warn(clippy::perf, clippy::single_match_else, clippy::dbg_macro)] +#![allow( + clippy::missing_inline_in_public_items, + clippy::cognitive_complexity, + clippy::must_use_candidate, + clippy::missing_errors_doc, + clippy::as_conversions, + clippy::let_unit_value, + missing_doc_code_examples +)] + +mod exec; +mod read; +mod results; + +use self::{ + read::{read_global_suite, read_harness, MetaData, Negative, TestFlag}, + results::write_json, +}; +use bitflags::bitflags; +use fxhash::FxHashMap; +use once_cell::sync::Lazy; +use serde::{Deserialize, Serialize}; +use std::{ + fs, + path::{Path, PathBuf}, +}; +use structopt::StructOpt; + +/// CLI information. +static CLI: Lazy = Lazy::new(Cli::from_args); + +/// Boa test262 tester +#[derive(StructOpt, Debug)] +#[structopt(name = "Boa test262 tester")] +struct Cli { + // Whether to show verbose output. + #[structopt(short, long)] + verbose: bool, + + /// Path to the Test262 suite. + #[structopt(long, parse(from_os_str), default_value = "./test262")] + test262_path: PathBuf, + + /// Optional output folder for the full results information. + #[structopt(short, long, parse(from_os_str))] + output: Option, +} + +impl Cli { + // Whether to show verbose output. + fn verbose(&self) -> bool { + self.verbose + } + + /// Path to the Test262 suite. + fn test262_path(&self) -> &Path { + self.test262_path.as_path() + } + + /// Optional output folder for the full results information. + fn output(&self) -> Option<&Path> { + self.output.as_deref() + } +} + +/// Program entry point. +fn main() { + if let Some(path) = CLI.output() { + if path.exists() { + if !path.is_dir() { + eprintln!("The output path must be a directory."); + std::process::exit(1); + } + } else { + fs::create_dir_all(path).expect("could not create the output directory"); + } + } + + if CLI.verbose() { + println!("Loading the test suite..."); + } + let harness = read_harness().expect("could not read initialization bindings"); + + let global_suite = read_global_suite().expect("could not get the list of tests to run"); + + if CLI.verbose() { + println!("Test suite loaded, starting tests..."); + } + let results = global_suite.run(&harness); + println!(); + + if CLI.verbose() { + println!("Results:"); + println!("Total tests: {}", results.total); + println!("Passed tests: {}", results.passed); + println!( + "Conformance: {:.2}%", + (results.passed as f64 / results.total as f64) * 100.0 + ) + } + + write_json(results).expect("could not write the results to the output JSON file"); +} + +/// All the harness include files. +#[derive(Debug, Clone)] +struct Harness { + assert: Box, + sta: Box, + includes: FxHashMap, Box>, +} + +/// Represents a test suite. +#[derive(Debug, Clone)] +struct TestSuite { + name: Box, + suites: Box<[TestSuite]>, + tests: Box<[Test]>, +} + +/// Outcome of a test suite. 
+#[derive(Debug, Clone, Serialize, Deserialize)] +struct SuiteResult { + name: Box, + total: usize, + passed: usize, + ignored: usize, + #[serde(skip_serializing_if = "Vec::is_empty")] + suites: Vec, + tests: Box<[TestResult]>, +} + +/// Outcome of a test. +#[derive(Debug, Clone, Serialize, Deserialize)] +struct TestResult { + name: Box, + passed: Option, +} + +/// Represents a test. +#[derive(Debug, Clone)] +struct Test { + name: Box, + description: Box, + esid: Option>, + flags: TestFlags, + information: Box, + features: Box<[Box]>, + expected_outcome: Outcome, + includes: Box<[Box]>, + locale: Locale, + content: Box, +} + +impl Test { + /// Creates a new test. + #[inline] + fn new(name: N, content: C, metadata: MetaData) -> Self + where + N: Into>, + C: Into>, + { + Self { + name: name.into(), + description: metadata.description, + esid: metadata.esid, + flags: metadata.flags.into(), + information: metadata.info, + features: metadata.features, + expected_outcome: Outcome::from(metadata.negative), + includes: metadata.includes, + locale: metadata.locale, + content: content.into(), + } + } +} + +/// An outcome for a test. +#[derive(Debug, Clone)] +enum Outcome { + Positive, + Negative { phase: Phase, error_type: Box }, +} + +impl Default for Outcome { + fn default() -> Self { + Self::Positive + } +} + +impl From> for Outcome { + fn from(neg: Option) -> Self { + neg.map(|neg| Self::Negative { + phase: neg.phase, + error_type: neg.error_type, + }) + .unwrap_or_default() + } +} + +bitflags! { + struct TestFlags: u16 { + const STRICT = 0b000000001; + const NO_STRICT = 0b000000010; + const MODULE = 0b000000100; + const RAW = 0b000001000; + const ASYNC = 0b000010000; + const GENERATED = 0b000100000; + const CAN_BLOCK_IS_FALSE = 0b001000000; + const CAN_BLOCK_IS_TRUE = 0b010000000; + const NON_DETERMINISTIC = 0b100000000; + } +} + +impl Default for TestFlags { + fn default() -> Self { + Self::STRICT | Self::NO_STRICT + } +} + +impl From for TestFlags { + fn from(flag: TestFlag) -> Self { + match flag { + TestFlag::OnlyStrict => Self::STRICT, + TestFlag::NoStrict => Self::NO_STRICT, + TestFlag::Module => Self::MODULE, + TestFlag::Raw => Self::RAW, + TestFlag::Async => Self::ASYNC, + TestFlag::Generated => Self::GENERATED, + TestFlag::CanBlockIsFalse => Self::CAN_BLOCK_IS_FALSE, + TestFlag::CanBlockIsTrue => Self::CAN_BLOCK_IS_TRUE, + TestFlag::NonDeterministic => Self::NON_DETERMINISTIC, + } + } +} + +impl From for TestFlags +where + T: AsRef<[TestFlag]>, +{ + fn from(flags: T) -> Self { + let flags = flags.as_ref(); + if flags.is_empty() { + Self::default() + } else { + let mut result = Self::empty(); + for flag in flags { + result |= Self::from(*flag); + } + + if !result.intersects(Self::default()) { + result |= Self::default() + } + + result + } + } +} + +/// Phase for an error. +#[derive(Debug, Clone, Copy, Deserialize)] +#[serde(rename_all = "lowercase")] +enum Phase { + Parse, + Early, + Resolution, + Runtime, +} + +/// Locale information structure. +#[derive(Debug, Default, Clone, Deserialize)] +#[serde(transparent)] +struct Locale { + locale: Box<[Box]>, +} diff --git a/tester/src/read.rs b/tester/src/read.rs new file mode 100644 index 00000000000..739cb800c89 --- /dev/null +++ b/tester/src/read.rs @@ -0,0 +1,257 @@ +//! Module to read the list of test suites from disk. + +use super::{Harness, Locale, Phase, Test, TestSuite, CLI}; +use fxhash::FxHashMap; +use serde::{Deserialize, Serialize}; +use std::{fs, io, path::Path}; + +/// Representation of the YAML metadata in Test262 tests. 
+#[derive(Debug, Clone, Deserialize)] +pub(super) struct MetaData { + pub(super) description: Box, + pub(super) esid: Option>, + pub(super) es5id: Option>, + pub(super) es6id: Option>, + #[serde(default)] + pub(super) info: Box, + #[serde(default)] + pub(super) features: Box<[Box]>, + #[serde(default)] + pub(super) includes: Box<[Box]>, + #[serde(default)] + pub(super) flags: Box<[TestFlag]>, + #[serde(default)] + pub(super) negative: Option, + #[serde(default)] + pub(super) locale: Locale, +} + +/// Negative test information structure. +#[derive(Debug, Clone, Deserialize)] +pub(super) struct Negative { + pub(super) phase: Phase, + #[serde(rename = "type")] + pub(super) error_type: Box, +} + +/// Individual test flag. +#[derive(Debug, Clone, Copy, Deserialize)] +#[serde(rename_all = "camelCase")] +pub(super) enum TestFlag { + OnlyStrict, + NoStrict, + Module, + Raw, + Async, + Generated, + #[serde(rename = "CanBlockIsFalse")] + CanBlockIsFalse, + #[serde(rename = "CanBlockIsTrue")] + CanBlockIsTrue, + #[serde(rename = "non-deterministic")] + NonDeterministic, +} + +/// Test information structure. +#[derive(Debug, Clone, Serialize, Deserialize)] +struct TestInfo { + desc: Box, + info: Box, +} + +impl TestInfo { + /// Creates a test information structure from the full metadata. + fn from_metadata(metadata: &MetaData) -> Self { + Self { + desc: metadata.description.trim().to_owned().into_boxed_str(), + info: metadata.info.trim().to_owned().into_boxed_str(), + } + } +} + +/// Name of the "test information" file. +const INFO_FILE_NAME: &str = "info.json"; + +/// Reads the Test262 defined bindings. +pub(super) fn read_harness() -> io::Result { + let mut includes = FxHashMap::default(); + + for entry in fs::read_dir(CLI.test262_path().join("harness"))? { + let entry = entry?; + let file_name = entry.file_name(); + let file_name = file_name.to_string_lossy(); + + if file_name == "assert.js" || file_name == "sta.js" { + continue; + } + + let content = fs::read_to_string(entry.path())?; + + includes.insert( + file_name.into_owned().into_boxed_str(), + content.into_boxed_str(), + ); + } + let assert = fs::read_to_string(CLI.test262_path().join("harness/assert.js"))?.into_boxed_str(); + let sta = fs::read_to_string(CLI.test262_path().join("harness/sta.js"))?.into_boxed_str(); + + Ok(Harness { + assert, + sta, + includes, + }) +} + +/// Reads the global suite from disk. +pub(super) fn read_global_suite() -> io::Result { + let path = CLI.test262_path().join("test"); + + let mut info = if let Some(path) = CLI.output() { + let path = path.join(INFO_FILE_NAME); + if path.exists() { + Some(serde_json::from_reader(io::BufReader::new( + fs::File::open(path)?, + ))?) + } else { + Some(FxHashMap::default()) + } + } else { + None + }; + + let suite = read_suite(path.as_path(), &mut info)?; + + if let (Some(path), info) = (CLI.output(), info) { + let path = path.join(INFO_FILE_NAME); + if CLI.verbose() { + println!("Writing the test information file at {}...", path.display()); + } + + let output = io::BufWriter::new(fs::File::create(path)?); + serde_json::to_writer(output, &info)?; + + if CLI.verbose() { + println!("Test information file written."); + } + } + + Ok(suite) +} + +/// Reads a test suite in the given path. +fn read_suite( + path: &Path, + test_info: &mut Option, TestInfo>>, +) -> io::Result { + use std::ffi::OsStr; + + let name = path + .file_stem() + .ok_or_else(|| { + io::Error::new( + io::ErrorKind::InvalidInput, + format!("test suite with no name found: {}", path.display()), + ) + })? 
+ .to_str() + .ok_or_else(|| { + io::Error::new( + io::ErrorKind::InvalidInput, + format!("non-UTF-8 suite name found: {}", path.display()), + ) + })?; + + let mut suites = Vec::new(); + let mut tests = Vec::new(); + + let filter = |st: &OsStr| { + st.to_string_lossy().ends_with("_FIXTURE.js") + // TODO: see if we can fix this. + || st.to_string_lossy() == "line-terminator-normalisation-CR.js" + }; + + // TODO: iterate in parallel + for entry in path.read_dir()? { + let entry = entry?; + + if entry.file_type()?.is_dir() { + suites.push(read_suite(entry.path().as_path(), test_info)?); + } else if filter(&entry.file_name()) { + continue; + } else { + tests.push(read_test(entry.path().as_path(), test_info)?); + } + } + + Ok(TestSuite { + name: name.into(), + suites: suites.into_boxed_slice(), + tests: tests.into_boxed_slice(), + }) +} + +/// Reads information about a given test case. +fn read_test( + path: &Path, + test_info: &mut Option, TestInfo>>, +) -> io::Result { + let name = path + .file_stem() + .ok_or_else(|| { + io::Error::new( + io::ErrorKind::InvalidInput, + format!("test with no file name found: {}", path.display()), + ) + })? + .to_str() + .ok_or_else(|| { + io::Error::new( + io::ErrorKind::InvalidInput, + format!("non-UTF-8 file name found: {}", path.display()), + ) + })?; + + let content = fs::read_to_string(path)?; + + let metadata = read_metadata(&content)?; + + if let Some(all_info) = test_info { + let path_str = path + .strip_prefix(CLI.test262_path()) + .expect("could not get test path string") + .to_str() + .ok_or_else(|| { + io::Error::new( + io::ErrorKind::InvalidInput, + format!("non-UTF-8 path found: {}", path.display()), + ) + })?; + + let new_info = TestInfo::from_metadata(&metadata); + + let _ = all_info.insert(path_str.to_owned().into_boxed_str(), new_info); + } + + Ok(Test::new(name, content, metadata)) +} + +/// Reads the metadata from the input test code. +fn read_metadata(code: &str) -> io::Result { + use once_cell::sync::Lazy; + use regex::Regex; + + /// Regular expression to retrieve the metadata of a test. + static META_REGEX: Lazy = Lazy::new(|| { + Regex::new(r#"/\*\-{3}((?:.|\n)*)\-{3}\*/"#) + .expect("could not compile metadata regular expression") + }); + + let yaml = META_REGEX + .captures(code) + .ok_or_else(|| io::Error::new(io::ErrorKind::InvalidData, "no metadata found"))? + .get(1) + .ok_or_else(|| io::Error::new(io::ErrorKind::InvalidData, "no metadata found"))? + .as_str(); + + serde_yaml::from_str(yaml).map_err(|e| io::Error::new(io::ErrorKind::InvalidData, e)) +} diff --git a/tester/src/results.rs b/tester/src/results.rs new file mode 100644 index 00000000000..19d98535b82 --- /dev/null +++ b/tester/src/results.rs @@ -0,0 +1,97 @@ +use super::{SuiteResult, CLI}; +use serde::{Deserialize, Serialize}; +use std::{ + env, fs, + io::{self, BufReader, BufWriter}, +}; + +/// Structure to store full result information. +#[derive(Debug, Clone, Deserialize, Serialize)] +struct ResultInfo { + commit: Box, + results: SuiteResult, +} + +/// Structure to store full result information. +#[derive(Debug, Clone, Deserialize, Serialize)] +struct ReducedResultInfo { + commit: Box, + total: usize, + passed: usize, + ignored: usize, +} + +impl From for ReducedResultInfo { + /// Creates a new reduced suite result from a full suite result. + fn from(info: ResultInfo) -> Self { + Self { + commit: info.commit, + total: info.results.total, + passed: info.results.passed, + ignored: info.results.ignored, + } + } +} + +/// File name of the "latest results" JSON file. 
+const LATEST_FILE_NAME: &str = "latest.json"; + +/// File name of the "all results" JSON file. +const RESULTS_FILE_NAME: &str = "results.json"; + +/// Writes the results of running the test suite to the given JSON output file. +/// +/// It will append the results to the ones already present, in an array. +pub(crate) fn write_json(results: SuiteResult) -> io::Result<()> { + if let Some(path) = CLI.output() { + let mut branch = env::var("GITHUB_REF").unwrap_or_default(); + if branch.starts_with("refs/pull") { + branch = "pull".to_owned(); + } + + let path = if branch.is_empty() { + path.to_path_buf() + } else { + let folder = path.join(branch); + fs::create_dir_all(&folder)?; + folder + }; + + if CLI.verbose() { + println!("Writing the results to {}...", path.display()); + } + + // Write the latest results. + + let latest_path = path.join(LATEST_FILE_NAME); + + let new_results = ResultInfo { + commit: env::var("GITHUB_SHA").unwrap_or_default().into_boxed_str(), + results, + }; + + let latest_output = BufWriter::new(fs::File::create(latest_path)?); + serde_json::to_writer(latest_output, &new_results)?; + + // Write the full list of results, retrieving the existing ones first. + + let all_path = path.join(RESULTS_FILE_NAME); + + let mut all_results: Vec = if all_path.exists() { + serde_json::from_reader(BufReader::new(fs::File::open(&all_path)?))? + } else { + Vec::new() + }; + + all_results.push(new_results.into()); + + let output = BufWriter::new(fs::File::create(&all_path)?); + serde_json::to_writer(output, &all_results)?; + + if CLI.verbose() { + println!("Results written correctly"); + } + } + + Ok(()) +} From 4a8b6401099600ebca295f47e5d2e91771fb7573 Mon Sep 17 00:00:00 2001 From: Iban Eguia Moraza Date: Wed, 2 Sep 2020 16:06:20 +0200 Subject: [PATCH 2/6] Upgraded dependencies --- boa/Cargo.toml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/boa/Cargo.toml b/boa/Cargo.toml index cebba46a1d6..23b331b0f21 100644 --- a/boa/Cargo.toml +++ b/boa/Cargo.toml @@ -23,7 +23,7 @@ rustc-hash = "1.1.0" num-bigint = { version = "0.3.0", features = ["serde"] } num-integer = "0.1.43" bitflags = "1.2.1" -indexmap = "1.5.1" +indexmap = "1.5.2" ryu-js = "0.2.0" chrono = "0.4.15" From d17f8440c09466bef6845a4060e65171a7c6b81c Mon Sep 17 00:00:00 2001 From: Iban Eguia Moraza Date: Wed, 2 Sep 2020 16:12:53 +0200 Subject: [PATCH 3/6] Renamed the tester crate to "boa_tester" and added some more Cargo.toml metadata --- Cargo.lock | 32 ++++++++++++++++---------------- tester/Cargo.toml | 12 ++++++++---- 2 files changed, 24 insertions(+), 20 deletions(-) diff --git a/Cargo.lock b/Cargo.lock index 3bdfe7271c1..b681d972866 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -109,6 +109,22 @@ dependencies = [ "structopt", ] +[[package]] +name = "boa_tester" +version = "0.9.0" +dependencies = [ + "Boa", + "bitflags", + "colored", + "fxhash", + "once_cell", + "regex", + "serde", + "serde_json", + "serde_yaml", + "structopt", +] + [[package]] name = "boa_wasm" version = "0.9.0" @@ -1033,22 +1049,6 @@ dependencies = [ "unicode-xid", ] -[[package]] -name = "tester" -version = "0.1.0" -dependencies = [ - "Boa", - "bitflags", - "colored", - "fxhash", - "once_cell", - "regex", - "serde", - "serde_json", - "serde_yaml", - "structopt", -] - [[package]] name = "textwrap" version = "0.11.0" diff --git a/tester/Cargo.toml b/tester/Cargo.toml index b2052e79fb8..bd57cecef9e 100644 --- a/tester/Cargo.toml +++ b/tester/Cargo.toml @@ -1,11 +1,15 @@ [package] -name = "tester" -version = "0.1.0" +name = "boa_tester" +version = 
"0.9.0" authors = ["Iban Eguia Moraza "] +description = "Boa is a Javascript lexer, parser and Just-in-Time compiler written in Rust. Currently, it has support for some of the language." +repository = "https://github.com/boa-dev/boa" +keywords = ["javascript", "compiler", "test262", "tester", "js"] +categories = ["parser-implementations", "wasm"] +license = "Unlicense/MIT" +exclude = ["../.vscode/*", "../Dockerfile", "../Makefile", "../.editorConfig"] edition = "2018" -# See more keys and their definitions at https://doc.rust-lang.org/cargo/reference/manifest.html - [dependencies] Boa = { path = "../boa" } structopt = "0.3.17" From 56e6e289b9d36556508f13ed52bf90e0a9b41fef Mon Sep 17 00:00:00 2001 From: Iban Eguia Moraza Date: Wed, 2 Sep 2020 17:08:06 +0200 Subject: [PATCH 4/6] Improved error formatting in CLI --- boa_cli/src/main.rs | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/boa_cli/src/main.rs b/boa_cli/src/main.rs index 03b6efcec58..adfb999e41d 100644 --- a/boa_cli/src/main.rs +++ b/boa_cli/src/main.rs @@ -152,8 +152,8 @@ pub fn main() -> Result<(), std::io::Error> { } } else { match forward_val(&mut engine, &buffer) { - Ok(v) => print!("{}", v.display()), - Err(v) => eprint!("{}", v.display()), + Ok(v) => println!("{}", v.display()), + Err(v) => eprintln!("Uncaught {}", v.display()), } } } From 8894a6a489a443ddc98bb32b8ab3d73fcf5e5c4d Mon Sep 17 00:00:00 2001 From: Iban Eguia Moraza Date: Thu, 3 Sep 2020 11:30:26 +0200 Subject: [PATCH 5/6] Fixed the new name of the tester binary in documentation and CI --- .github/workflows/test262.yml | 2 +- CONTRIBUTING.md | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/.github/workflows/test262.yml b/.github/workflows/test262.yml index ef1eb3e6198..9346ddfffe4 100644 --- a/.github/workflows/test262.yml +++ b/.github/workflows/test262.yml @@ -50,7 +50,7 @@ jobs: - run: mkdir -p gh-pages/test262 - name: Run the test262 test suite - run: cargo run --release --bin tester -- -o gh-pages/test262 + run: cargo run --release --bin boa_tester -- -o gh-pages/test262 - name: Commit files if: github.event_name == 'push' diff --git a/CONTRIBUTING.md b/CONTRIBUTING.md index 0a33f259d89..1c35eeeea16 100644 --- a/CONTRIBUTING.md +++ b/CONTRIBUTING.md @@ -72,7 +72,7 @@ suite, you can just run the normal `cargo test`, and to run the full ECMAScript with this command: ``` -cargo run --release --bin tester -- -v 2> error.log +cargo run --release --bin boa_tester -- -v 2> error.log ``` Note that this requires the `test262` submodule to be checked out, so you will need to run the following first: From 7175ac865359ae05ca36aab1e4a2f17c1ae4fa27 Mon Sep 17 00:00:00 2001 From: Iban Eguia Moraza Date: Thu, 3 Sep 2020 11:39:13 +0200 Subject: [PATCH 6/6] Fixing the prettier checker --- .github/workflows/webassembly.yml | 7 ++----- .prettierignore | 2 ++ 2 files changed, 4 insertions(+), 5 deletions(-) diff --git a/.github/workflows/webassembly.yml b/.github/workflows/webassembly.yml index 0dce43ab52f..4b223201f69 100644 --- a/.github/workflows/webassembly.yml +++ b/.github/workflows/webassembly.yml @@ -16,10 +16,7 @@ jobs: - name: Checkout uses: actions/checkout@v2 - name: Check code formatting - uses: creyD/prettier_action@v3.0 - with: - dry: true - prettier_options: --check . + run: npx prettier --check . 
build: name: Build webassembly demo @@ -34,7 +31,7 @@ jobs: override: true profile: minimal - name: Install wasm-pack - run: curl https://rustwasm.github.io/wasm-pack/installer/init.sh -sSf | sh + run: curl https://rustwasm.github.io/wasm-pack/installer/init.sh -sSf | sh - name: Cache yarn build uses: actions/cache@v2 with: diff --git a/.prettierignore b/.prettierignore index e71eabb274b..981ec730007 100644 --- a/.prettierignore +++ b/.prettierignore @@ -6,3 +6,5 @@ boa/benches/bench_scripts/mini_js.js boa/benches/bench_scripts/clean_js.js boa_wasm/pkg dist +test262 +tests/js/test.js