Mirror of https://github.com/fluencelabs/assemblyscript, synced 2025-06-24 04:01:46 +00:00
Asterisk imports parsing; Pussyfooting around stdlib
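In short: the parser previously handled only brace-delimited member imports; this commit adds parsing for namespace ("asterisk") imports. A sketch of the two source forms, with an illustrative module path:

import { a, b } from "./module"; // already supported: becomes ImportStatement.declarations
import * as ns from "./module";  // newly parsed: becomes ImportStatement.namespaceName (resolution still TODO, see program.ts below)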
src/ast.ts (39 lines changed)
@@ -391,6 +391,18 @@ export abstract class Node {
   const stmt: ImportStatement = new ImportStatement();
   stmt.range = range;
   for (let i: i32 = 0, k: i32 = (stmt.declarations = declarations).length; i < k; ++i) declarations[i].parent = stmt;
+  stmt.namespaceName = null;
   stmt.path = path;
   stmt.normalizedPath = resolvePath(normalizePath(path.value), range.source.normalizedPath);
   stmt.internalPath = mangleInternalPath(stmt.normalizedPath);
+  return stmt;
+}
+
+static createImportAll(identifier: IdentifierExpression, path: StringLiteralExpression, range: Range): ImportStatement {
+  const stmt: ImportStatement = new ImportStatement();
+  stmt.range = range;
+  stmt.declarations = null;
+  stmt.namespaceName = identifier;
+  stmt.path = path;
+  stmt.normalizedPath = resolvePath(normalizePath(path.value), range.source.normalizedPath);
+  stmt.internalPath = mangleInternalPath(stmt.normalizedPath);
@@ -1569,8 +1581,10 @@ export class ImportStatement extends Statement {

 kind = NodeKind.IMPORT;

-/** Array of member declarations. */
-declarations: ImportDeclaration[];
+/** Array of member declarations or `null` if an asterisk import. */
+declarations: ImportDeclaration[] | null;
+/** Name of the local namespace, if an asterisk import. */
+namespaceName: IdentifierExpression | null;
 /** Path being imported from. */
 path: StringLiteralExpression;
 /** Normalized path. */
@@ -1579,13 +1593,22 @@ export class ImportStatement extends Statement {
 internalPath: string;

 serialize(sb: string[]): void {
-  sb.push("import {\n");
-  for (let i: i32 = 0, k: i32 = this.declarations.length; i < k; ++i) {
-    if (i > 0)
-      sb.push(",\n");
-    this.declarations[i].serialize(sb);
-  }
+  if (this.declarations) {
+    sb.push("import {\n");
+    for (let i: i32 = 0, k: i32 = this.declarations.length; i < k; ++i) {
+      if (i > 0)
+        sb.push(",\n");
+      this.declarations[i].serialize(sb);
+    }
+    sb.push("\n}");
+  } else {
+    sb.push("import * as ");
+    if (this.namespaceName)
+      this.namespaceName.serialize(sb);
+    else
+      throw new Error("missing asterisk import identifier");
+  }
-  sb.push("\n} from ");
+  sb.push(" from ");
   this.path.serialize(sb);
 }
}
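For reference, a sketch of the output the updated serialize produces in each case, inferred from the pushes above (declaration names and path illustrative):

// declarations = [a, b], path = "./module":
//   import {
//   a,
//   b
//   } from "./module"
// namespaceName = ns, declarations = null:
//   import * as ns from "./module"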

src/compiler.ts
@@ -2093,7 +2093,7 @@ export class Compiler extends DiagnosticEmitter {
   return this.module.createBinary(BinaryOp.EqF64, operand, this.module.createF64(0));
 }
 op = this.currentType.isLongInteger
-  ? UnaryOp.EqzI64 // TODO: does this yield i64 0/1?
+  ? UnaryOp.EqzI64
   : UnaryOp.EqzI32;
 this.currentType = Type.bool;
 break;
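The dropped TODO has a definite answer in the WebAssembly spec, which is presumably why setting this.currentType = Type.bool is sound on both paths: eqz always yields an i32, regardless of operand width.

// WebAssembly type signatures:
//   i32.eqz : [i32] -> [i32]
//   i64.eqz : [i64] -> [i32]  // result is an i32 0/1, not an i64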

src/parser.ts
@@ -93,7 +93,7 @@ export class Parser extends DiagnosticEmitter {
 const normalizedPath: string = normalizePath(path);
 for (let i: i32 = 0, k: i32 = this.program.sources.length; i < k; ++i)
   if (this.program.sources[i].normalizedPath == normalizedPath)
-    throw new Error("duplicate source");
+    return; // already parsed
 this.seenlog.add(normalizedPath);

 const source: Source = new Source(path, text, isEntry);
@@ -104,10 +104,10 @@ export class Parser extends DiagnosticEmitter {

 while (!tn.skip(Token.ENDOFFILE)) {
   const statement: Statement | null = this.parseTopLevelStatement(tn);
-  if (!statement)
-    return;
-  statement.parent = source;
-  source.statements.push(statement);
+  if (statement) {
+    statement.parent = source;
+    source.statements.push(statement);
+  }
 }
}

@@ -201,7 +201,7 @@ export class Parser extends DiagnosticEmitter {
 default:
   if (hasModifier(ModifierKind.EXPORT, modifiers)) {
     tn.reset();
-    statement = this.parseExport(tn, modifiers); // TODO: why exactly does this have modifiers again?
+    statement = this.parseExport(tn, modifiers); // TODO: why exactly does this have modifiers again? 'declare'?
   } else {
     if (modifiers) {
       if (modifier = getModifier(ModifierKind.DECLARE, modifiers))
@@ -713,13 +713,10 @@ export class Parser extends DiagnosticEmitter {

 let isGetter: bool = false;
 let isSetter: bool = false;
-if (tn.skip(Token.GET)) {
+if (isGetter = tn.skip(Token.GET))
   modifiers = addModifier(Node.createModifier(ModifierKind.GET, tn.range()), modifiers);
-  isGetter = true;
-} else if (tn.skip(Token.SET)) { // can't be both
+else if (isSetter = tn.skip(Token.SET)) // can't be both
   modifiers = addModifier(Node.createModifier(ModifierKind.SET, tn.range()), modifiers);
-  isSetter = true;
-}

 if (tn.skip(Token.CONSTRUCTOR) || tn.skip(Token.IDENTIFIER)) { // order is important
   const identifier: IdentifierExpression = tn.token == Token.CONSTRUCTOR
@@ -899,10 +896,12 @@ export class Parser extends DiagnosticEmitter {
 }

 parseImport(tn: Tokenizer): ImportStatement | null {
-  // at 'import': '{' (ImportMember (',' ImportMember)*)? '}' 'from' StringLiteral ';'?
-  const startRange: Range = tn.range();
+  // at 'import': ('{' (ImportMember (',' ImportMember)*)? '}' | '*' 'as' Identifier) 'from' StringLiteral ';'?
+  const startPos: i32 = tn.tokenPos;
+  let members: ImportDeclaration[] | null = null;
+  let namespaceName: IdentifierExpression | null = null;
   if (tn.skip(Token.OPENBRACE)) {
-    const members: ImportDeclaration[] = new Array();
+    members = new Array();
     if (!tn.skip(Token.CLOSEBRACE)) {
       do {
         const member: ImportDeclaration | null = this.parseImportDeclaration(tn);
@@ -915,22 +914,49 @@ export class Parser extends DiagnosticEmitter {
       return null;
     }
   }
-  if (tn.skip(Token.FROM)) {
-    if (tn.skip(Token.STRINGLITERAL)) {
-      const path: StringLiteralExpression = Node.createStringLiteral(tn.readString(), tn.range());
-      const ret: ImportStatement = Node.createImport(members, path, Range.join(startRange, tn.range()));
-      if (!this.seenlog.has(ret.normalizedPath)) {
-        this.backlog.push(ret.normalizedPath);
-        this.seenlog.add(ret.normalizedPath);
-      }
-      tn.skip(Token.SEMICOLON);
-      return ret;
-    } else
-      this.error(DiagnosticCode.String_literal_expected, tn.range());
-  } else
-    this.error(DiagnosticCode._0_expected, tn.range(), "from");
-} else
+} else if (tn.skip(Token.ASTERISK)) {
+  if (tn.skip(Token.AS)) {
+    if (tn.skip(Token.IDENTIFIER)) {
+      namespaceName = Node.createIdentifier(tn.readIdentifier(), tn.range());
+    } else {
+      this.error(DiagnosticCode.Identifier_expected, tn.range());
+      return null;
+    }
+  } else {
+    this.error(DiagnosticCode._0_expected, tn.range(), "as");
+    return null;
+  }
+} else {
   this.error(DiagnosticCode._0_expected, tn.range(), "{");
   return null;
 }
+if (tn.skip(Token.FROM)) {
+  if (tn.skip(Token.STRINGLITERAL)) {
+    const path: StringLiteralExpression = Node.createStringLiteral(tn.readString(), tn.range());
+    let ret: ImportStatement;
+    if (members) {
+      if (!namespaceName)
+        ret = Node.createImport(members, path, tn.range(startPos, tn.pos));
+      else {
+        assert(false);
+        return null;
+      }
+    } else if (namespaceName) {
+      ret = Node.createImportAll(namespaceName, path, tn.range(startPos, tn.pos));
+    } else {
+      assert(false);
+      return null;
+    }
+    if (!this.seenlog.has(ret.normalizedPath)) {
+      this.backlog.push(ret.normalizedPath);
+      this.seenlog.add(ret.normalizedPath);
+    }
+    tn.skip(Token.SEMICOLON);
+    return ret;
+  } else
+    this.error(DiagnosticCode.String_literal_expected, tn.range());
+} else
+  this.error(DiagnosticCode._0_expected, tn.range(), "from");
+return null;
+}
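Net effect of the reworked parseImport, as a sketch (exactly one of members / namespaceName is non-null by the time 'from' is parsed; the assert(false) arms guard the impossible combinations):

// import { a } from "./m";   → members != null       → Node.createImport(members, path, range)
// import * as m from "./m";  → namespaceName != null → Node.createImportAll(namespaceName, path, range)
// import * from "./m";       → error: 'as' expected
// import a from "./m";       → error: '{' expected (default imports are not parsed here)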
@@ -1363,7 +1389,7 @@ export class Parser extends DiagnosticEmitter {
 // see: http://www.engr.mun.ca/~theo/Misc/exp_parsing.htm#climbing

 parseExpressionStart(tn: Tokenizer): Expression | null {
-  const token: Token = tn.next();
+  const token: Token = tn.next(true);
   const startPos: i32 = tn.tokenPos;
   let expr: Expression | null = null;


src/program.ts
@@ -501,11 +501,21 @@ export class Program extends DiagnosticEmitter {
 }

 private initializeImports(statement: ImportStatement, queuedExports: Map<string,QueuedExport>, queuedImports: QueuedImport[]): void {
-  const members: ImportDeclaration[] = statement.declarations;
-  for (let i: i32 = 0, k: i32 = members.length; i < k; ++i) {
-    const declaration: ImportDeclaration = members[i];
-    this.initializeImport(declaration, statement.internalPath, queuedExports, queuedImports);
-  }
+  const declarations: ImportDeclaration[] | null = statement.declarations;
+  if (declarations) {
+    for (let i: i32 = 0, k: i32 = declarations.length; i < k; ++i) {
+      const declaration: ImportDeclaration = declarations[i];
+      this.initializeImport(declaration, statement.internalPath, queuedExports, queuedImports);
+    }
+  } else if (statement.namespaceName) {
+    const internalName: string = statement.range.source.internalPath + "/" + statement.namespaceName.name;
+    if (this.elements.has(internalName)) {
+      this.error(DiagnosticCode.Duplicate_identifier_0, statement.namespaceName.range, internalName);
+      return;
+    }
+    this.error(DiagnosticCode.Operation_not_supported, statement.range); // TODO
+  } else
+    assert(false);
 }

 private initializeImport(declaration: ImportDeclaration, internalPath: string, queuedExports: Map<string,QueuedExport>, queuedImports: QueuedImport[]): void {
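So an asterisk import now parses and is checked for name collisions, but resolution remains unimplemented: the non-colliding case falls through to Operation_not_supported. With an illustrative path, a program like

import * as other from "./other";

gets a well-formed AST but currently reports "Operation not supported" at this statement.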

src/tokenizer.ts
@@ -731,6 +731,17 @@ export class Tokenizer extends DiagnosticEmitter {
   }
 }

+// skipUntil(token1: Token, token2: Token = -1): bool {
+//   let next: Token;
+//   do {
+//     if ((next = this.peek()) == Token.ENDOFFILE)
+//       return false;
+//     if (next == token1 || next == token2)
+//       return true;
+//     this.next();
+//   } while (true);
+// }
+
 mark(): void {
   this.markedPos = this.pos;
   this.markedToken = this.token;

src/util/charcode.ts
@@ -22,7 +22,7 @@ export const enum CharCode {
 THINSPACE = 0x2009,
 HAIRSPACE = 0x200A,
 ZEROWIDTHSPACE = 0x200B,
-NARRINOBREAKSPACE = 0x202F,
+NARROWNOBREAKSPACE = 0x202F,
 IDEOGRAPHICSPACE = 0x3000,
 MATHEMATICALSPACE = 0x205F,
 OGHAM = 0x1680,
@@ -156,7 +156,7 @@ export function isWhiteSpace(c: i32): bool {
 case CharCode.NONBREAKINGSPACE:
 case CharCode.NEXTLINE:
 case CharCode.OGHAM:
-case CharCode.NARRINOBREAKSPACE:
+case CharCode.NARROWNOBREAKSPACE:
 case CharCode.MATHEMATICALSPACE:
 case CharCode.IDEOGRAPHICSPACE:
 case CharCode.BYTEORDERMARK:
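For reference, 0x202F is U+202F NARROW NO-BREAK SPACE, so the renamed constant now matches the Unicode character name; the whitespace classification itself is unchanged:

// isWhiteSpace(CharCode.NARROWNOBREAKSPACE) == isWhiteSpace(0x202F) == true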