Merge pull request #101 from carlegbert/use-typescript-private-properties

prefer ts private keyword instead of # prefix
mike-lischke authored Nov 30, 2024
2 parents c7c3951 + 9f8d4ee commit bc64712
Showing 24 changed files with 320 additions and 322 deletions.
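
In the hunks below, each converted field's `#name` declaration becomes a `private name` declaration and every `this.#name` access loses the `#`; other `#` fields in the same files, such as `#text` and `#modeStack` in Lexer.ts, are left as they are. The two forms are not equivalent: an ECMAScript `#` field is hard-private, enforced by the JavaScript engine at runtime, while the TypeScript `private` modifier is a compile-time check that leaves an ordinary property on the emitted object. A minimal TypeScript sketch of that difference (the classes are illustrative, not taken from this repository):

// ECMAScript private field: hard privacy, enforced at runtime by the engine.
class CounterWithHashField {
    #count = 0;

    public increment(): void {
        this.#count++;
    }
}

// TypeScript `private` modifier: checked by the compiler only; the property
// still exists as a regular, reachable property on the emitted JavaScript object.
class CounterWithPrivateKeyword {
    private count = 0;

    public increment(): void {
        this.count++;
    }
}

const a = new CounterWithHashField();
// a.#count;            // syntax error: a # field is inaccessible outside its class
const b = new CounterWithPrivateKeyword();
// (b as any).count;    // compiles with a cast and works at runtime: soft privacy only

The commit message does not record a motivation, so the usual reasons for such a switch (simpler emitted JavaScript, easier access from tests) are assumptions here rather than anything stated in the PR.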
8 changes: 4 additions & 4 deletions src/CharStream.ts
@@ -125,7 +125,7 @@ export class CharStreamImpl implements CharStream {
return "";
}

- return this.#stringFromRange(start, stop + 1);
+ return this.stringFromRange(start, stop + 1);
}

public getTextFromInterval(interval: Interval): string {
@@ -140,11 +140,11 @@ export class CharStreamImpl implements CharStream {
return "";
}

- return this.#stringFromRange(start, stop + 1);
+ return this.stringFromRange(start, stop + 1);
}

public toString(): string {
- return this.#stringFromRange(0);
+ return this.stringFromRange(0);
}

public get size(): number {
@@ -159,7 +159,7 @@ export class CharStreamImpl implements CharStream {
return IntStream.UNKNOWN_SOURCE_NAME;
}

- #stringFromRange(start: number, stop?: number): string {
+ private stringFromRange(start: number, stop?: number): string {
const data = this.data.slice(start, stop);
let result = "";
data.forEach((value) => {
75 changes: 37 additions & 38 deletions src/Lexer.ts
@@ -95,7 +95,7 @@ export abstract class Lexer extends Recognizer<LexerATNSimulator> implements Tok
*/
protected currentTokenStartLine = 0;

- #input: CharStream;
+ private input: CharStream;

/**
* The goal of all lexer rules/methods is to create a token object.
@@ -106,13 +106,14 @@ export abstract class Lexer extends Recognizer<LexerATNSimulator> implements Tok
* something non-null so that the auto token emit mechanism will not
* emit another token.
*/
- #token: Token | null = null;
+ private token: Token | null = null;

/**
* Once we see EOF on char stream, next token will be EOF.
* If you have DONE : EOF ; then you see DONE EOF.
*/
- #hitEOF = false;
+ private hitEOF = false;
+ private factory: TokenFactory<Token>;

#modeStack: number[] = [];

@@ -123,32 +123,30 @@ export abstract class Lexer extends Recognizer<LexerATNSimulator> implements Tok
*/
#text?: string;

- #factory: TokenFactory<Token>;
-
public constructor(input: CharStream, options?: Partial<LexerOptions>) {
super();

// Override the default options with the provided options.
this.options = { ...this.options, ...options };
- this.#input = input;
- this.#factory = CommonTokenFactory.DEFAULT;
+ this.input = input;
+ this.factory = CommonTokenFactory.DEFAULT;
}

public reset(seekBack = true): void {
// wack Lexer state variables
if (seekBack) {
- this.#input.seek(0); // rewind the input
+ this.input.seek(0); // rewind the input
}

- this.#token = null;
+ this.token = null;
this.type = Token.INVALID_TYPE;
this.channel = Token.DEFAULT_CHANNEL;
this.tokenStartCharIndex = -1;
this.currentTokenColumn = -1;
this.currentTokenStartLine = -1;
this.#text = undefined;

- this.#hitEOF = false;
+ this.hitEOF = false;
this.mode = Lexer.DEFAULT_MODE;
this.#modeStack = [];

@@ -157,26 +156,26 @@ export abstract class Lexer extends Recognizer<LexerATNSimulator> implements Tok

/** @returns a token from this source; i.e., match a token on the char stream. */
public nextToken(): Token {
- if (this.#input === null) {
+ if (this.input === null) {
throw new Error("nextToken requires a non-null input stream.");
}

/**
* Mark start location in char stream so unbuffered streams are
* guaranteed at least have text of current token
*/
- const tokenStartMarker = this.#input.mark();
+ const tokenStartMarker = this.input.mark();
try {
while (true) {
- if (this.#hitEOF) {
+ if (this.hitEOF) {
this.emitEOF();

- return this.#token!;
+ return this.token!;
}

- this.#token = null;
+ this.token = null;
this.channel = Token.DEFAULT_CHANNEL;
- this.tokenStartCharIndex = this.#input.index;
+ this.tokenStartCharIndex = this.input.index;
this.currentTokenColumn = this.interpreter.column;
this.currentTokenStartLine = this.interpreter.line;
this.#text = undefined;
@@ -185,7 +184,7 @@ export abstract class Lexer extends Recognizer<LexerATNSimulator> implements Tok
this.type = Token.INVALID_TYPE;
let ttype = Lexer.SKIP;
try {
- ttype = this.interpreter.match(this.#input, this.mode);
+ ttype = this.interpreter.match(this.input, this.mode);
} catch (e) {
if (e instanceof LexerNoViableAltException) {
this.notifyListeners(e); // report error
@@ -195,8 +194,8 @@ export abstract class Lexer extends Recognizer<LexerATNSimulator> implements Tok
}
}

- if (this.#input.LA(1) === Token.EOF) {
- this.#hitEOF = true;
+ if (this.input.LA(1) === Token.EOF) {
+ this.hitEOF = true;
}

if (this.type === Token.INVALID_TYPE) {
@@ -216,16 +215,16 @@ export abstract class Lexer extends Recognizer<LexerATNSimulator> implements Tok
continue;
}

- if (this.#token === null) {
+ if (this.token === null) {
this.emit();
}

- return this.#token!;
+ return this.token!;
}
} finally {
// make sure we release marker after match or
// unbuffered char stream will keep buffering
- this.#input.release(tokenStartMarker);
+ this.input.release(tokenStartMarker);
}
}

@@ -278,7 +277,7 @@ export abstract class Lexer extends Recognizer<LexerATNSimulator> implements Tok
* rather than a single variable as this implementation does).
*/
public emitToken(token: Token): void {
- this.#token = token;
+ this.token = token;
}

/**
@@ -289,24 +288,24 @@ export abstract class Lexer extends Recognizer<LexerATNSimulator> implements Tok
* custom Token objects or provide a new factory.
*/
public emit(): Token {
- const t = this.#factory.create([this, this.#input], this.type, this.#text, this.channel,
+ const t = this.factory.create([this, this.input], this.type, this.#text, this.channel,
this.tokenStartCharIndex, this.getCharIndex() - 1, this.currentTokenStartLine, this.currentTokenColumn);
this.emitToken(t);

return t;
}

public emitEOF(): Token {
- const eof = this.#factory.create([this, this.#input], Token.EOF, undefined, Token.DEFAULT_CHANNEL,
- this.#input.index, this.#input.index - 1, this.line, this.column);
+ const eof = this.factory.create([this, this.input], Token.EOF, undefined, Token.DEFAULT_CHANNEL,
+ this.input.index, this.input.index - 1, this.line, this.column);
this.emitToken(eof);

return eof;
}

/** What is the index of the current character of lookahead? */
public getCharIndex(): number {
- return this.#input.index;
+ return this.input.index;
}

/**
@@ -326,8 +325,8 @@ export abstract class Lexer extends Recognizer<LexerATNSimulator> implements Tok

public notifyListeners(e: LexerNoViableAltException): void {
const start = this.tokenStartCharIndex;
- const stop = this.#input.index;
- const text = this.#input.getTextFromRange(start, stop);
+ const stop = this.input.index;
+ const text = this.input.getTextFromRange(start, stop);
const msg = "token recognition error at: '" + this.getErrorDisplay(text) + "'";
this.errorListenerDispatch.syntaxError(this, null, this.currentTokenStartLine, this.currentTokenColumn, msg, e);
}
@@ -367,35 +366,35 @@ export abstract class Lexer extends Recognizer<LexerATNSimulator> implements Tok
* to do sophisticated error recovery if you are in a fragment rule.
*/
public recover(re: LexerNoViableAltException | RecognitionException): void {
- if (this.#input.LA(1) !== Token.EOF) {
+ if (this.input.LA(1) !== Token.EOF) {
if (re instanceof LexerNoViableAltException) {
// skip a char and try again
- this.interpreter.consume(this.#input);
+ this.interpreter.consume(this.input);
} else {
- this.#input.consume();
+ this.input.consume();
}
}
}

public get inputStream(): CharStream {
- return this.#input;
+ return this.input;
}

public set inputStream(input: CharStream) {
this.reset(false);
- this.#input = input;
+ this.input = input;
}

public set tokenFactory(factory: TokenFactory<Token>) {
- this.#factory = factory;
+ this.factory = factory;
};

public get tokenFactory(): TokenFactory<Token> {
- return this.#factory;
+ return this.factory;
};

public get sourceName(): string {
- return this.#input.getSourceName();
+ return this.input.getSourceName();
}

public get line(): number {
@@ -418,7 +417,7 @@ export abstract class Lexer extends Recognizer<LexerATNSimulator> implements Tok
if (this.#text) {
return this.#text;
} else {
- return this.interpreter.getText(this.#input);
+ return this.interpreter.getText(this.input);
}
}

10 changes: 5 additions & 5 deletions src/LexerInterpreter.ts
@@ -13,6 +13,9 @@ import { Vocabulary } from "./Vocabulary.js";
import { CharStream } from "./CharStream.js";

export class LexerInterpreter extends Lexer {
+ private decisionToDFA: DFA[];
+ private sharedContextCache = new PredictionContextCache();
+
#grammarFileName: string;
#atn: ATN;

@@ -21,9 +24,6 @@ export class LexerInterpreter extends Lexer {
#modeNames: string[];

#vocabulary: Vocabulary;
- #decisionToDFA: DFA[];
-
- #sharedContextCache = new PredictionContextCache();

public constructor(grammarFileName: string, vocabulary: Vocabulary, ruleNames: string[], channelNames: string[],
modeNames: string[], atn: ATN, input: CharStream) {
@@ -41,11 +41,11 @@ export class LexerInterpreter extends Lexer {
this.#modeNames = modeNames.slice(0);
this.#vocabulary = vocabulary;

- this.#decisionToDFA = atn.decisionToState.map((ds, i) => {
+ this.decisionToDFA = atn.decisionToState.map((ds, i) => {
return new DFA(ds, i);
});

- this.interpreter = new LexerATNSimulator(this, atn, this.#decisionToDFA, this.#sharedContextCache);
+ this.interpreter = new LexerATNSimulator(this, atn, this.decisionToDFA, this.sharedContextCache);
}

public override get atn(): ATN {
24 changes: 12 additions & 12 deletions src/Parser.ts
@@ -89,7 +89,7 @@ export abstract class Parser extends Recognizer<ParserATNSimulator> {
* implemented as a parser listener so this field is not directly used by
* other parser methods.
*/
- #tracer: TraceListener | null = null;
+ private tracer: TraceListener | null = null;

/**
* This field holds the deserialized {@link ATN} with bypass alternatives, created
@@ -99,7 +99,7 @@ export abstract class Parser extends Recognizer<ParserATNSimulator> {
*
* @see ATNDeserializationOptions#isGenerateRuleBypassTransitions()
*/
- #bypassAltsAtnCache: ATN | null = null;
+ private bypassAltsAtnCache: ATN | null = null;

#inputStream!: TokenStream;

@@ -351,14 +351,14 @@ export abstract class Parser extends Recognizer<ParserATNSimulator> {
throw new Error("The current parser does not support an ATN with bypass alternatives.");
}

- if (this.#bypassAltsAtnCache !== null) {
- return this.#bypassAltsAtnCache;
+ if (this.bypassAltsAtnCache !== null) {
+ return this.bypassAltsAtnCache;
}

const deserializationOptions = { readOnly: false, verifyATN: true, generateRuleBypassTransitions: true };
- this.#bypassAltsAtnCache = new ATNDeserializer(deserializationOptions).deserialize(serializedAtn);
+ this.bypassAltsAtnCache = new ATNDeserializer(deserializationOptions).deserialize(serializedAtn);

- return this.#bypassAltsAtnCache;
+ return this.bypassAltsAtnCache;
}

/**
@@ -740,14 +740,14 @@ export abstract class Parser extends Recognizer<ParserATNSimulator> {
*/
public setTrace(trace: boolean): void {
if (!trace) {
- this.removeParseListener(this.#tracer);
- this.#tracer = null;
+ this.removeParseListener(this.tracer);
+ this.tracer = null;
} else {
- if (this.#tracer !== null) {
- this.removeParseListener(this.#tracer);
+ if (this.tracer !== null) {
+ this.removeParseListener(this.tracer);
}
- this.#tracer = new TraceListener(this);
- this.addParseListener(this.#tracer);
+ this.tracer = new TraceListener(this);
+ this.addParseListener(this.tracer);
}
}
