Skip to content

Commit

Permalink
Migrate to antlr4ng 3
Browse files Browse the repository at this point in the history
  • Loading branch information
alessiostalla committed Mar 25, 2024
1 parent 969e526 commit 12d6688
Show file tree
Hide file tree
Showing 8 changed files with 50 additions and 48 deletions.
6 changes: 3 additions & 3 deletions package.json
Original file line number Diff line number Diff line change
Expand Up @@ -83,7 +83,7 @@
},
"peerDependencies": {
"@lionweb/core": "^0.6.2",
"antlr4ng": "^2.0.3",
"antlr4ng": "^3.0.4",
"cmd-ts": "^0.11.0",
"ecore": "^0.12.0"
},
Expand Down Expand Up @@ -122,8 +122,8 @@
"@types/node": "^18.19.2",
"@typescript-eslint/eslint-plugin": "^6.13.2",
"@typescript-eslint/parser": "^6.13.2",
"antlr4ng": "^2.0.3",
"antlr4ng-cli": "^1.0.6",
"antlr4ng": "^3.0.4",
"antlr4ng-cli": "^2.0.0",
"chai": "^4.3.10",
"cmd-ts": "^0.11.0",
"cross-env": "^7.0.3",
Expand Down
5 changes: 2 additions & 3 deletions src/parsing/parse-tree.ts
Original file line number Diff line number Diff line change
@@ -1,5 +1,5 @@
import {Node, Origin, Point, Position} from "../";
import {Interval, ParserRuleContext, ParseTree, TerminalNode, Token} from "antlr4ng";
import {ParserRuleContext, ParseTree, TerminalNode, Token} from "antlr4ng";

// Note: we cannot provide Kolasu-style extension methods on ParseTree because it's an interface.
// Also, Kolasu-style extension methods on Token are problematic because we use Token.EOF below.
Expand Down Expand Up @@ -116,6 +116,5 @@ Object.defineProperty(Node.prototype, "parseTree", {
ParserRuleContext.prototype.getOriginalText = function () {
const a = this.start.start;
const b = this.stop.stop;
const interval = new Interval(a, b);
return this.start.inputStream.getText(interval);
return this.start.inputStream?.getTextFromRange(a, b);
}
16 changes: 5 additions & 11 deletions src/parsing/tylasu-parser.ts
Original file line number Diff line number Diff line change
Expand Up @@ -4,7 +4,6 @@ import {
CharStream,
CommonTokenStream,
ErrorNode,
Interval,
Lexer,
Parser as ANTLRParser,
ParserRuleContext,
Expand Down Expand Up @@ -103,7 +102,7 @@ export abstract class TylasuANTLRLexer<T extends TylasuToken> implements TylasuL
issues.push(Issue.syntactic(message, IssueSeverity.WARNING, Position.ofTokenEnd(t)))
}

const code = inputStream.getText(Interval.of(0, inputStream.size - 1));
const code = inputStream.getTextFromRange(0, inputStream.size - 1);
return new LexingResult(code, tokens, issues, performance.now() - time);
}

Expand Down Expand Up @@ -183,12 +182,7 @@ export abstract class TylasuParser<

processDescendantsAndErrors(
root,
it => {
if (it.exception != null) {
const message = `Recognition exception: ${it.exception.message}`;
issues.push(Issue.syntactic(message, IssueSeverity.ERROR, Position.ofParseTree(it)));
}
},
() => {},
it => {
const message = `Error node found (token: ${it.symbol?.text})`;
issues.push(Issue.syntactic(message, IssueSeverity.ERROR, Position.ofParseTree(it)));
Expand Down Expand Up @@ -217,7 +211,7 @@ export abstract class TylasuParser<
if (root != null) {
this.verifyParseTree(parser, issues, root);
}
const code = inputStream.getText(Interval.of(0, inputStream.size - 1));
const code = inputStream.getTextFromRange(0, inputStream.size - 1);
return new FirstStageParsingResult(code, root, issues, parser, performance.now() - time, lexingTime);
}

Expand All @@ -234,7 +228,7 @@ export abstract class TylasuParser<
if (!source) {
source = new StringSource(code);
}
code = new CharStream(code);
code = CharStream.fromString(code);
}
const start = performance.now();
const firstStage = this.parseFirstStage(code, measureLexingTime);
Expand All @@ -251,7 +245,7 @@ export abstract class TylasuParser<
delete node.origin;
}
}
const text = code.getText(Interval.of(0, code.size - 1));
const text = code.getTextFromRange(0, code.size - 1);
return new ParsingResult(text, ast, issues, undefined, firstStage, performance.now() - start);
}

Expand Down
22 changes: 7 additions & 15 deletions tests/mapping.test.ts
Original file line number Diff line number Diff line change
Expand Up @@ -2,7 +2,7 @@ import {expect} from "chai";

import {ASTTransformer, Child, GenericErrorNode, GenericNode, Node, Position} from "../src";
import {SimpleLangLexer} from "./parser/SimpleLangLexer";
import {CharStreams, CommonTokenStream, ParserRuleContext} from "antlr4ng";
import {CharStream, CommonTokenStream, ParserRuleContext} from "antlr4ng";
import {CompilationUnitContext, DisplayStmtContext, SetStmtContext, SimpleLangParser} from "./parser/SimpleLangParser";
import {ParseTreeOrigin} from "../src/parsing";
import {ParseTreeToASTTransformer} from "../src/mapping";
Expand Down Expand Up @@ -56,14 +56,14 @@ describe('Mapping of Parse Trees to ASTs', function() {
it("Generic node",
function () {
const transformer = new ParseTreeToASTTransformer();
const node = transformer.transform(new ParserRuleContext());
const node = transformer.transform(new ParserRuleContext(null));
expect(node).not.to.be.undefined;
expect(node instanceof GenericNode).to.be.true;
});
it("Node registered declaratively",
function () {
const code = "set foo = 123 + 45";
const lexer = new SimpleLangLexer(CharStreams.fromString(code));
const lexer = new SimpleLangLexer(CharStream.fromString(code));
const parser = new SimpleLangParser(new CommonTokenStream(lexer));
const cu = parser.compilationUnit();
const setStmt = cu.statement(0) as SetStmtContext;
Expand All @@ -84,7 +84,7 @@ describe('Mapping of Parse Trees to ASTs', function() {
describe('ParseTreeToASTTransformer', function () {
it("Test ParseTree Transformer", function () {
const code = "set foo = 123\ndisplay 456";
const lexer = new SimpleLangLexer(CharStreams.fromString(code));
const lexer = new SimpleLangLexer(CharStream.fromString(code));
const parser = new SimpleLangParser(new CommonTokenStream(lexer));
const pt = parser.compilationUnit();

Expand All @@ -104,7 +104,7 @@ describe('ParseTreeToASTTransformer', function () {
});
it("Test transformation with errors", function () {
const code = "set foo = \ndisplay @@@";
const lexer = new SimpleLangLexer(CharStreams.fromString(code));
const lexer = new SimpleLangLexer(CharStream.fromString(code));
const parser = new SimpleLangParser(new CommonTokenStream(lexer));
parser.removeErrorListeners();
const pt = parser.compilationUnit();
Expand All @@ -127,7 +127,7 @@ describe('ParseTreeToASTTransformer', function () {
});
it("Test generic node", function () {
const code = "set foo = 123\ndisplay 456";
const lexer = new SimpleLangLexer(CharStreams.fromString(code));
const lexer = new SimpleLangLexer(CharStream.fromString(code));
const parser = new SimpleLangParser(new CommonTokenStream(lexer));
const pt = parser.compilationUnit();

Expand All @@ -136,7 +136,7 @@ describe('ParseTreeToASTTransformer', function () {
});
it("test generic AST transformer", function () {
const code = "set foo = 123\ndisplay 456";
const lexer = new SimpleLangLexer(CharStreams.fromString(code));
const lexer = new SimpleLangLexer(CharStream.fromString(code));
const parser = new SimpleLangParser(new CommonTokenStream(lexer));
const pt = parser.compilationUnit();

Expand All @@ -163,20 +163,12 @@ const configure = function(transformer: ASTTransformer) : void {
transformer.registerNodeFactory<DisplayStmtContext, DisplayIntStatement>(
DisplayStmtContext,
source => {
if (source.exception || source.expression().exception) {
// We throw a custom error so that we can check that it's recorded in the AST
throw new Error("Parse error");
}
return new DisplayIntStatement(parseInt(source.expression().INT_LIT()!.getText()));
});

transformer.registerNodeFactory<SetStmtContext, SetStatement>(
SetStmtContext,
source => {
if (source.exception || source.expression().exception) {
// We throw a custom error so that we can check that it's recorded in the AST
throw new Error("Parse error");
}
const setStatement = new SetStatement();
setStatement.variable = source.ID().getText();
setStatement.value = parseInt(source.expression().INT_LIT()!.getText());
Expand Down
6 changes: 3 additions & 3 deletions tests/model/origin.test.ts
Original file line number Diff line number Diff line change
@@ -1,5 +1,5 @@
import {SimpleLangLexer} from "../parser/SimpleLangLexer";
import {CharStreams, CommonTokenStream} from "antlr4ng";
import {CharStream, CommonTokenStream} from "antlr4ng";
import {SimpleLangParser} from "../parser/SimpleLangParser";
import {ParseTreeOrigin} from "../../src/parsing";
import {expect} from "chai";
Expand All @@ -11,7 +11,7 @@ describe('Origin', function () {
const code = `set a = 1 + 2
input c is string
display 2 * 3`;
const lexer = new SimpleLangLexer(CharStreams.fromString(code));
const lexer = new SimpleLangLexer(CharStream.fromString(code));
const parser = new SimpleLangParser(new CommonTokenStream(lexer));
const parseTreeRoot = parser.compilationUnit();
expect(parser.numberOfSyntaxErrors).to.equal(0);
Expand All @@ -31,7 +31,7 @@ display 2 * 3`;
const code = `set a = 1 + 2
input c is string
display 2 * 3`;
const lexer = new SimpleLangLexer(CharStreams.fromString(code));
const lexer = new SimpleLangLexer(CharStream.fromString(code));
const parser = new SimpleLangParser(new CommonTokenStream(lexer));
const parseTreeRoot = parser.compilationUnit();
expect(parser.numberOfSyntaxErrors).to.equal(0);
Expand Down
6 changes: 3 additions & 3 deletions tests/model/position.test.ts
Original file line number Diff line number Diff line change
Expand Up @@ -2,7 +2,7 @@ import {expect} from "chai";

import {Point, START_POINT, Node, Position} from "../../src";
import {SimpleLangLexer} from "../parser/SimpleLangLexer";
import {CharStreams, CommonTokenStream} from "antlr4ng";
import {CharStream, CommonTokenStream} from "antlr4ng";
import {SetStmtContext, SimpleLangParser} from "../parser/SimpleLangParser";
import {positionOfParseTree} from "../../src/parsing";

Expand Down Expand Up @@ -49,7 +49,7 @@ describe('Position', function() {
it("ParserRuleContext position",
function () {
const code = "set foo = 123";
const lexer = new SimpleLangLexer(CharStreams.fromString(code));
const lexer = new SimpleLangLexer(CharStream.fromString(code));
const parser = new SimpleLangParser(new CommonTokenStream(lexer));
const cu = parser.compilationUnit();
const setStmt = cu.statement(0) as SetStmtContext;
Expand All @@ -60,7 +60,7 @@ describe('Position', function() {
it("Position derived from parse tree node",
function () {
const code = "set foo = 123";
const lexer = new SimpleLangLexer(CharStreams.fromString(code));
const lexer = new SimpleLangLexer(CharStream.fromString(code));
const parser = new SimpleLangParser(new CommonTokenStream(lexer));
const cu = parser.compilationUnit();
const setStmt = cu.statement(0) as SetStmtContext;
Expand Down
21 changes: 19 additions & 2 deletions tests/strumenta-playground.test.ts
Original file line number Diff line number Diff line change
Expand Up @@ -4,16 +4,33 @@ import {Issue, Source} from "../src";
import {registerECoreModel} from "../src/interop/ecore";
import {ParserTraceLoader, saveForStrumentaPlayground} from "../src/interop/strumenta-playground";
import {NodeSubclass} from "./nodes";
import {CharStream, CommonToken, Lexer, TerminalNode, Token, TokenStream} from "antlr4ng";
import {CharStream, CommonToken, Lexer, TerminalNode, Token, TokenSource, TokenStream} from "antlr4ng";
import * as fs from "fs";
import ECore from "ecore/dist/ecore";
import {ANTLRTokenFactory, ParsingResult} from "../src/parsing";
import {EcoreEnabledParser} from "../src/interop/ecore-enabled-parser";

// A CommonToken deliberately detached from any lexer: whatever `source` the
// caller passes is discarded and replaced with [null, null], so the token has
// no TokenSource and no CharStream behind it. The test below uses it to build
// a fake parse tree (fakePT) to check that ANTLR parse trees are not
// serialized into the playground model.
class SyntheticToken extends CommonToken {

    /**
     * @param details antlr4ng CommonToken construction options; only `type`
     *   is required here. Note that any `source` supplied is ignored — the
     *   super call always substitutes [null, null].
     */
    constructor(details: {
        source?: [TokenSource | null, CharStream | null];
        type: number;
        channel?: number;
        start?: number;
        stop?: number;
        text?: string;
        line?: number;
        tokenIndex?: number;
        column?: number
    }) {
        // Spread the caller's options but force a null source pair, keeping
        // the token stream-less regardless of what was passed in.
        super({...details, source: [null, null]});
    }
}

describe('Strumenta Playground', function() {
it("Export round-trip", function () {
// We assign a fake parse tree, to ensure that we don't attempt to serialize ANTLR parse trees into the model.
const fakePT = new TerminalNode(new CommonToken([null, null], Token.EOF, Token.DEFAULT_CHANNEL, 0, 0));
const fakePT = new TerminalNode(new SyntheticToken({ type: Token.EOF }));
(fakePT.symbol as CommonToken).line = 1;
(fakePT.symbol as CommonToken).column = 0;
/* TODO not supported yet
Expand Down
16 changes: 8 additions & 8 deletions yarn.lock
Original file line number Diff line number Diff line change
Expand Up @@ -973,15 +973,15 @@ ansi-styles@^5.0.0:
resolved "https://registry.yarnpkg.com/ansi-styles/-/ansi-styles-5.2.0.tgz#07449690ad45777d1924ac2abb2fc8895dba836b"
integrity sha512-Cxwpt2SfTzTtXcfOlzGEee8O+c+MmUgGrNiBcXnuWxuFJHe6a5Hz7qwhwe5OgaSYI0IJvkLqWX1ASG+cJOkEiA==

antlr4ng-cli@^1.0.6:
version "1.0.6"
resolved "https://registry.yarnpkg.com/antlr4ng-cli/-/antlr4ng-cli-1.0.6.tgz#db22d74b4a931489ef0c59f146fc90624c6871ae"
integrity sha512-9ZeVmXu8v/UO4mLhSTyNSCi2fEEBzya2G04bqigwZVg5YdTlS6HhuAa2ruZy/HdV5erhL12USqPCJPMsq+OeKQ==
antlr4ng-cli@^2.0.0:
version "2.0.0"
resolved "https://registry.yarnpkg.com/antlr4ng-cli/-/antlr4ng-cli-2.0.0.tgz#4f86f3c3818a2170aa1041d4e1633b489af00131"
integrity sha512-oAt5OSSYhRQn1PgahtpAP4Vp3BApCoCqlzX7Q8ZUWWls4hX59ryYuu0t7Hwrnfk796OxP/vgIJaqxdltd/oEvQ==

antlr4ng@^2.0.3:
version "2.0.3"
resolved "https://registry.yarnpkg.com/antlr4ng/-/antlr4ng-2.0.3.tgz#2dde0c90474597395917ed48b9f4394ec5563b3b"
integrity sha512-usw0hZsNbTQrJ899P6sHLlQbmKgwQ9UBYq5rh6ZFicPf5qtRJogPVTBj4fSB3OY5ZLP5TSZ7d8tR2p4I5KyiLQ==
antlr4ng@^3.0.4:
version "3.0.4"
resolved "https://registry.yarnpkg.com/antlr4ng/-/antlr4ng-3.0.4.tgz#71a47e6148ae75f72fa5f27fbed5ef3462815c7c"
integrity sha512-u1Ww6wVv9hq70E9AaYe5qW3ba8hvnjJdO3ZsKnb3iJWFV/medLEEhbyWwXCvvD2ef0ptdaiIUgmaazS/WE6uyQ==

anymatch@^3.0.3:
version "3.1.3"
Expand Down

0 comments on commit 12d6688

Please sign in to comment.