Clean up tests directory a bit

Author: dcodeIO
Date: 2018-03-16 15:45:18 +01:00
parent ee94634ffb
commit eef923d124
8 changed files with 56 additions and 124 deletions


@@ -7,14 +7,14 @@ consists of:
 * A test file that is parsed or compiled (.ts)
 * One or multiple automatically generated fixtures generated from the source file

-Creating a test:
+### Creating a test:

 * Run `npm run clean` to make sure that the sources are tested instead of the distribution
 * Create a new test file (.ts) within the respective directory (see below) that contains your test code
 * Follow the instructions below to generate the first fixture(s)
 * Make sure the fixture(s) contain exactly what you'd expect

-Updating a test:
+### Updating a test:

 * Run `npm run clean` to make sure that the sources are tested instead of the distribution
 * Make changes to the respective test file (.ts)
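
For illustration, a new test and the fixture it generates might look like the following minimal sketch; the file name and contents are hypothetical and not part of this commit (the fixture layout follows the parser harness further down in this diff):

// tests/parser/example.ts (hypothetical new test file)
export function foo(): void {}

// tests/parser/example.ts.fixture.ts (hypothetical generated fixture: roughly the source
// re-serialized from its AST, with any diagnostics appended as trailing // comments)
export function foo(): void {}
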
@@ -54,11 +54,11 @@ Note that the parser suite currently can't recreate just a specific fixture.
 Compiler
 --------

-General directory: [tests/compiler](./compiler)
+General directory: [tests/compiler](./compiler)<br />
 Standard library directory: [tests/compiler/std](./compiler/std)

-The source file is parsed and compiled to a module, validated, interpreted and the resulting module
-converted to WebAsssembly text format.
+The source file is parsed and compiled to a module, validated and the resulting module converted to
+WebAsssembly text format.

 The text format output is compared to its fixture and the module interpreted in a WebAssembly VM. To
 assert for runtime conditions, the `assert` builtin can be used. Note that tree-shaking is enabled
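
A compiler test, per the flow above, exports the code to be tested (tree-shaking is enabled, so only reachable code is compiled) and checks runtime conditions with the `assert` builtin. A minimal sketch, with a hypothetical file name:

// tests/compiler/example.ts (hypothetical file name, not part of this commit)
export function add(a: i32, b: i32): i32 {
  return a + b;
}

// evaluated when the compiled module is interpreted in the WebAssembly VM
assert(add(1, 2) == 3);
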
@@ -94,5 +94,9 @@ $> npm run test:compiler -- testNameWithoutTs --create
 Other
 -----

-Tests in other directories are not run automatically and do not need to be updated. Most of them
-are legacy tests.
+Tests in other directories are not run automatically and do not need to be updated.
+
+* [tests/allocators](./allocators) contains the memory allocator test suite
+* [tests/binaryen](./binaryen) contains various triggers for earlier Binaryen issues
+* [tests/tokenizer](./tokenizer.js) is a visual test for the tokenizer tokenizing itself
+* [tests/util-path](./util-path.js) is a sanity test for the path utility

(deleted file)
@@ -1,43 +0,0 @@
var asc = require("../dist/asc.js");
console.log(Object.keys(asc));
var stdout = asc.createMemoryStream();
var stderr = asc.createMemoryStream();
var stats = asc.createStats();
process.exitCode = asc.main([
"test.ts",
"--validate",
"--optimize",
"--measure",
"--textFile", // -> stdout
"--binaryFile", "test.wasm",
"--sourceMap"
], {
stdout: stdout,
stderr: stderr,
stats: stats,
readFile: function(filename) {
console.log("<< readFile: " + filename);
if (filename === "/test.ts") { // sic: browser path
return "export function foo(): void {}";
}
throw Error("File not found: " + filename);
},
writeFile: function(filename, contents) {
console.log(">> writeFile: " + filename + " (" + contents.length + " bytes)");
},
listFiles: function(dirname) {
console.log("<< listFiles: " + dirname);
return [];
}
});
console.log(">> stdout >>");
console.log(stdout.toString());
console.log(">> stderr >>");
console.error(stderr.toString());
console.log(">> stats >>", stats);

(deleted file)
@@ -1,37 +0,0 @@
import { I64 } from "../src/util/i64";
import * as Long from "long";
import * as assert from "assert";
function test(fn, lo, hi, otherLo, otherHi) {
let expected = Long.fromBits(lo, hi)[fn](Long.fromBits(otherLo, otherHi));
let actual = new I64(lo, hi); actual[fn + "32"](otherLo, otherHi);
assert.equal(actual.lo, expected.low, fn + " lo ");
assert.equal(actual.hi, expected.high, fn + " hi");
}
function rand() {
let r = Math.random();
// 10% edge cases
if (r < 0.05) return 0x80000000 | 0;
else if (r < 0.1) return 0;
return (Math.random() * 0xffffffff) | 0;
}
let i = 0;
while (i++ < 1000000) {
let lo = rand();
let hi = rand();
let otherLo = rand();
let otherHi = rand();
// console.log(lo, hi, otherLo, otherHi);
test("add", lo, hi, otherLo, otherHi);
test("sub", lo, hi, otherLo, otherHi);
test("mul", lo, hi, otherLo, otherHi);
test("shl", lo, hi, otherLo, otherHi);
test("shr", lo, hi, otherLo, otherHi);
test("shru", lo, hi, otherLo, otherHi);
test("and", lo, hi, otherLo, otherHi);
test("or", lo, hi, otherLo, otherHi);
test("xor", lo, hi, otherLo, otherHi);
}
console.log("done");


@@ -7,7 +7,7 @@ require("ts-node").register({ project: require("path").join(__dirname, "..", "sr
 require("../src/glue/js");
 var Parser = require("../src/parser").Parser;
-var serializeSource = require("../src/extra/ast").serializeSource;
+var ASTBuilder = require("../src/extra/ast").ASTBuilder;
 var isCreate = process.argv[2] === "--create";
 var filter = process.argv.length > 2 && !isCreate ? "*" + process.argv[2] + "*.ts" : "**.ts";
@@ -21,13 +21,10 @@ glob.sync(filter, { cwd: __dirname + "/parser" }).forEach(filename => {
 var failed = false;
 var parser = new Parser();
-// parser.silentDiagnostics = true;
 var sourceText = fs.readFileSync(__dirname + "/parser/" + filename, { encoding: "utf8" }).replace(/\r?\n/g, "\n").replace(/^\/\/.*\r?\n/mg, "");
 parser.parseFile(sourceText, filename, true);
-var sb = [];
-serializeSource(parser.program.sources[0], sb);
-var actual = sb.join("") + parser.diagnostics.map(diagnostic => "// " + diagnostic + "\n").join("");
+var serializedSourceText = ASTBuilder.build(parser.program.sources[0]);
+var actual = serializedSourceText + parser.diagnostics.map(diagnostic => "// " + diagnostic + "\n").join("");
 var fixture = filename + ".fixture.ts";
 if (isCreate) {

(deleted file)
@@ -1,8 +0,0 @@
import { normalizePath, resolvePath } from "../src/util";
import * as path from "path";
let test = "./Y/./N/./N/../../././../Y/./.";
console.log(normalizePath(test));
console.log(path.posix.normalize(test));
console.log(resolvePath("../../..", "lib/util/i64.ts"));

tests/tokenizer.js (new file)
@@ -0,0 +1,30 @@
const fs = require("fs");
const path = require("path");
require("ts-node").register({ project: path.join(__dirname, "..", "src", "tsconfig.json") });
require("../src/glue/js");
const { Tokenizer, Token } = require("../src/tokenizer");
const { Source, SourceKind } = require("../src/ast");
const text = fs.readFileSync(__dirname + "/../src/tokenizer.ts").toString();
const tn = new Tokenizer(new Source("compiler.ts", text, SourceKind.ENTRY));
do {
let token = tn.next();
if (token == Token.IDENTIFIER) {
console.log(Token[token] + " > " + tn.readIdentifier());
} else if (token == Token.INTEGERLITERAL) {
console.log(Token[token] + " > " + tn.readInteger());
} else if (token == Token.FLOATLITERAL) {
console.log(Token[token] + " > " + tn.readFloat());
} else if (token == Token.STRINGLITERAL) {
console.log(Token[token] + " > " + tn.readString());
} else if (token == Token.ENDOFFILE) {
console.log(Token[token]);
break;
} else {
let range = tn.range();
console.log(Token[token] + " > " + range.source.text.substring(range.start, range.end));
}
} while (true);

(deleted file)
@@ -1,23 +0,0 @@
import "../src/glue/js";
import { Tokenizer, Token } from "../src/tokenizer";
import { Source } from "../src/ast";
import * as fs from "fs";
const text = fs.readFileSync(__dirname + "/../src/tokenizer.ts").toString();
const tn = new Tokenizer(new Source("tokenizer.ts", text));
let token;
do {
token = tn.next();
let range = tn.range();
console.log(Token[token] + " -> " + range.source.text.substring(range.start, range.end));
if (token == Token.IDENTIFIER)
console.log("> " + tn.readIdentifier());
else if (token == Token.INTEGERLITERAL)
console.log("> " + tn.readInteger());
else if (token == Token.FLOATLITERAL)
console.log("> " + tn.readFloat());
else if (token == Token.STRINGLITERAL)
console.log("> " + tn.readString());
} while (token != Token.ENDOFFILE);

tests/util-path.js (new file)
@@ -0,0 +1,12 @@
const path = require("path");
const assert = require("assert");
require("ts-node").register({ project: path.join(__dirname, "..", "src", "tsconfig.json") });
require("../src/glue/js");
const { normalize, resolve } = require("../src/util/path");
var test = "./Y/./N/./N/../../././../Y/./.";
assert.strictEqual(normalize(test), path.posix.normalize(test));
assert.strictEqual(resolve("../../..", "lib/util/i64.ts"), "..");