From eef923d124424ea7c7bcd205c89d60ae07771a23 Mon Sep 17 00:00:00 2001 From: dcodeIO Date: Fri, 16 Mar 2018 15:45:18 +0100 Subject: [PATCH] Clean up tests directory a bit --- tests/README.md | 18 +++++++++++------- tests/bundled-asc.js | 43 ------------------------------------------- tests/i64.ts | 37 ------------------------------------- tests/parser.js | 9 +++------ tests/path.ts | 8 -------- tests/tokenizer.js | 30 ++++++++++++++++++++++++++++++ tests/tokenizer.ts | 23 ----------------------- tests/util-path.js | 12 ++++++++++++ 8 files changed, 56 insertions(+), 124 deletions(-) delete mode 100644 tests/bundled-asc.js delete mode 100644 tests/i64.ts delete mode 100644 tests/path.ts create mode 100644 tests/tokenizer.js delete mode 100644 tests/tokenizer.ts create mode 100644 tests/util-path.js diff --git a/tests/README.md b/tests/README.md index b368fd53..f1def803 100644 --- a/tests/README.md +++ b/tests/README.md @@ -7,14 +7,14 @@ consists of: * A test file that is parsed or compiled (.ts) * One or multiple automatically generated fixtures generated from the source file -Creating a test: +### Creating a test: * Run `npm run clean` to make sure that the sources are tested instead of the distribution * Create a new test file (.ts) within the respective directory (see below) that contains your test code * Follow the instructions below to generate the first fixture(s) * Make sure the fixture(s) contain exactly what you'd expect -Updating a test: +### Updating a test: * Run `npm run clean` to make sure that the sources are tested instead of the distribution * Make changes to the respective test file (.ts) @@ -54,11 +54,11 @@ Note that the parser suite currently can't recreate just a specific fixture. Compiler -------- -General directory: [tests/compiler](./compiler) +General directory: [tests/compiler](./compiler)
Standard library directory: [tests/compiler/std](./compiler/std) -The source file is parsed and compiled to a module, validated, interpreted and the resulting module -converted to WebAsssembly text format. +The source file is parsed and compiled to a module, validated and the resulting module converted to +WebAsssembly text format. The text format output is compared to its fixture and the module interpreted in a WebAssembly VM. To assert for runtime conditions, the `assert` builtin can be used. Note that tree-shaking is enabled @@ -94,5 +94,9 @@ $> npm run test:compiler -- testNameWithoutTs --create Other ----- -Tests in other directories are not run automatically and do not need to be updated. Most of them -are legacy tests. +Tests in other directories are not run automatically and do not need to be updated. + +* [tests/allocators](./allocators) contains the memory allocator test suite +* [tests/binaryen](./binaryen) contains various triggers for earlier Binaryen issues +* [tests/tokenizer](./tokenizer.js) is a visual test for the tokenizer tokenizing itself +* [tests/util-path](./util-path.js) is a sanity test for the path utility diff --git a/tests/bundled-asc.js b/tests/bundled-asc.js deleted file mode 100644 index 7e78aae4..00000000 --- a/tests/bundled-asc.js +++ /dev/null @@ -1,43 +0,0 @@ -var asc = require("../dist/asc.js"); - -console.log(Object.keys(asc)); - -var stdout = asc.createMemoryStream(); -var stderr = asc.createMemoryStream(); -var stats = asc.createStats(); - -process.exitCode = asc.main([ - "test.ts", - "--validate", - "--optimize", - "--measure", - "--textFile", // -> stdout - "--binaryFile", "test.wasm", - "--sourceMap" -], { - stdout: stdout, - stderr: stderr, - stats: stats, - readFile: function(filename) { - console.log("<< readFile: " + filename); - if (filename === "/test.ts") { // sic: browser path - return "export function foo(): void {}"; - } - throw Error("File not found: " + filename); - }, - writeFile: function(filename, contents) { - console.log(">> writeFile: " + filename + " (" + contents.length + " bytes)"); - }, - listFiles: function(dirname) { - console.log("<< listFiles: " + dirname); - return []; - } -}); - -console.log(">> stdout >>"); -console.log(stdout.toString()); - -console.log(">> stderr >>"); -console.error(stderr.toString()); - -console.log(">> stats >>", stats); diff --git a/tests/i64.ts b/tests/i64.ts deleted file mode 100644 index b6a76832..00000000 --- a/tests/i64.ts +++ /dev/null @@ -1,37 +0,0 @@ -import { I64 } from "../src/util/i64"; -import * as Long from "long"; -import * as assert from "assert"; - -function test(fn, lo, hi, otherLo, otherHi) { - let expected = Long.fromBits(lo, hi)[fn](Long.fromBits(otherLo, otherHi)); - let actual = new I64(lo, hi); actual[fn + "32"](otherLo, otherHi); - assert.equal(actual.lo, expected.low, fn + " lo "); - assert.equal(actual.hi, expected.high, fn + " hi"); -} - -function rand() { - let r = Math.random(); - // 10% edge cases - if (r < 0.05) return 0x80000000 | 0; - else if (r < 0.1) return 0; - return (Math.random() * 0xffffffff) | 0; -} - -let i = 0; -while (i++ < 1000000) { - let lo = rand(); - let hi = rand(); - let otherLo = rand(); - let otherHi = rand(); - // console.log(lo, hi, otherLo, otherHi); - test("add", lo, hi, otherLo, otherHi); - test("sub", lo, hi, otherLo, otherHi); - test("mul", lo, hi, otherLo, otherHi); - test("shl", lo, hi, otherLo, otherHi); - test("shr", lo, hi, otherLo, otherHi); - test("shru", lo, hi, otherLo, otherHi); - test("and", lo, hi, otherLo, otherHi); - 
test("or", lo, hi, otherLo, otherHi); - test("xor", lo, hi, otherLo, otherHi); -} -console.log("done"); diff --git a/tests/parser.js b/tests/parser.js index 9ed287e3..78e68d47 100644 --- a/tests/parser.js +++ b/tests/parser.js @@ -7,7 +7,7 @@ require("ts-node").register({ project: require("path").join(__dirname, "..", "sr require("../src/glue/js"); var Parser = require("../src/parser").Parser; -var serializeSource = require("../src/extra/ast").serializeSource; +var ASTBuilder = require("../src/extra/ast").ASTBuilder; var isCreate = process.argv[2] === "--create"; var filter = process.argv.length > 2 && !isCreate ? "*" + process.argv[2] + "*.ts" : "**.ts"; @@ -21,13 +21,10 @@ glob.sync(filter, { cwd: __dirname + "/parser" }).forEach(filename => { var failed = false; var parser = new Parser(); - // parser.silentDiagnostics = true; var sourceText = fs.readFileSync(__dirname + "/parser/" + filename, { encoding: "utf8" }).replace(/\r?\n/g, "\n").replace(/^\/\/.*\r?\n/mg, ""); parser.parseFile(sourceText, filename, true); - - var sb = []; - serializeSource(parser.program.sources[0], sb); - var actual = sb.join("") + parser.diagnostics.map(diagnostic => "// " + diagnostic + "\n").join(""); + var serializedSourceText = ASTBuilder.build(parser.program.sources[0]); + var actual = serializedSourceText + parser.diagnostics.map(diagnostic => "// " + diagnostic + "\n").join(""); var fixture = filename + ".fixture.ts"; if (isCreate) { diff --git a/tests/path.ts b/tests/path.ts deleted file mode 100644 index d0c04505..00000000 --- a/tests/path.ts +++ /dev/null @@ -1,8 +0,0 @@ -import { normalizePath, resolvePath } from "../src/util"; -import * as path from "path"; - -let test = "./Y/./N/./N/../../././../Y/./."; -console.log(normalizePath(test)); -console.log(path.posix.normalize(test)); - -console.log(resolvePath("../../..", "lib/util/i64.ts")); diff --git a/tests/tokenizer.js b/tests/tokenizer.js new file mode 100644 index 00000000..8b42621c --- /dev/null +++ b/tests/tokenizer.js @@ -0,0 +1,30 @@ +const fs = require("fs"); +const path = require("path"); + +require("ts-node").register({ project: path.join(__dirname, "..", "src", "tsconfig.json") }); +require("../src/glue/js"); + +const { Tokenizer, Token } = require("../src/tokenizer"); +const { Source, SourceKind } = require("../src/ast"); + +const text = fs.readFileSync(__dirname + "/../src/tokenizer.ts").toString(); +const tn = new Tokenizer(new Source("compiler.ts", text, SourceKind.ENTRY)); + +do { + let token = tn.next(); + if (token == Token.IDENTIFIER) { + console.log(Token[token] + " > " + tn.readIdentifier()); + } else if (token == Token.INTEGERLITERAL) { + console.log(Token[token] + " > " + tn.readInteger()); + } else if (token == Token.FLOATLITERAL) { + console.log(Token[token] + " > " + tn.readFloat()); + } else if (token == Token.STRINGLITERAL) { + console.log(Token[token] + " > " + tn.readString()); + } else if (token == Token.ENDOFFILE) { + console.log(Token[token]); + break; + } else { + let range = tn.range(); + console.log(Token[token] + " > " + range.source.text.substring(range.start, range.end)); + } +} while (true); diff --git a/tests/tokenizer.ts b/tests/tokenizer.ts deleted file mode 100644 index ad147c9a..00000000 --- a/tests/tokenizer.ts +++ /dev/null @@ -1,23 +0,0 @@ -import "../src/glue/js"; -import { Tokenizer, Token } from "../src/tokenizer"; -import { Source } from "../src/ast"; -import * as fs from "fs"; - -const text = fs.readFileSync(__dirname + "/../src/tokenizer.ts").toString(); - -const tn = new Tokenizer(new 
Source("tokenizer.ts", text)); - -let token; -do { - token = tn.next(); - let range = tn.range(); - console.log(Token[token] + " -> " + range.source.text.substring(range.start, range.end)); - if (token == Token.IDENTIFIER) - console.log("> " + tn.readIdentifier()); - else if (token == Token.INTEGERLITERAL) - console.log("> " + tn.readInteger()); - else if (token == Token.FLOATLITERAL) - console.log("> " + tn.readFloat()); - else if (token == Token.STRINGLITERAL) - console.log("> " + tn.readString()); -} while (token != Token.ENDOFFILE); diff --git a/tests/util-path.js b/tests/util-path.js new file mode 100644 index 00000000..d3324f2d --- /dev/null +++ b/tests/util-path.js @@ -0,0 +1,12 @@ +const path = require("path"); +const assert = require("assert"); + +require("ts-node").register({ project: path.join(__dirname, "..", "src", "tsconfig.json") }); +require("../src/glue/js"); + +const { normalize, resolve } = require("../src/util/path"); + +var test = "./Y/./N/./N/../../././../Y/./."; +assert.strictEqual(normalize(test), path.posix.normalize(test)); + +assert.strictEqual(resolve("../../..", "lib/util/i64.ts"), "..");