
Commit de17e6b
Cleanups made while backporting to Java
dberlin committed Apr 7, 2019
1 parent 6e5215f commit de17e6b
Showing 4 changed files with 126 additions and 110 deletions.
18 changes: 9 additions & 9 deletions src/IncrementalParser.ts
@@ -31,13 +31,14 @@ import { ParseTreeListener } from "./tree/ParseTreeListener";
 export abstract class IncrementalParser extends Parser
 	implements ParseTreeListener {
 	// Current parser epoch. Incremented every time a new incremental parser is created.
-	public static _PARSER_EPOCH: number = 0;
-	public static get PARSER_EPOCH() {
-		return this._PARSER_EPOCH;
+	private static _GLOBAL_PARSER_EPOCH: number = 0;
+	public static get GLOBAL_PARSER_EPOCH() {
+		return this._GLOBAL_PARSER_EPOCH;
 	}
 	protected incrementParserEpoch() {
-		++IncrementalParser._PARSER_EPOCH;
+		return ++IncrementalParser._GLOBAL_PARSER_EPOCH;
 	}
+	public parserEpoch = -1;
 
 	private parseData: IncrementalParserData | undefined;
 	constructor(
@@ -46,7 +47,7 @@ export abstract class IncrementalParser extends Parser
 	) {
 		super(input);
 		this.parseData = parseData;
-		this.incrementParserEpoch();
+		this.parserEpoch = this.incrementParserEpoch();
 		// Register ourselves as our own parse listener. Life is weird.
 		this.addParseListener(this);
 	}
@@ -61,8 +62,7 @@ export abstract class IncrementalParser extends Parser
 	// Pop the min max stack the stream is using and return the interval.
 	private popCurrentMinMax(ctx: IncrementalParserRuleContext) {
 		let incStream = this.inputStream as IncrementalTokenStream;
-		let interval = incStream.popMinMax();
-		return interval;
+		return incStream.popMinMax();
 	}
 
 	/**
@@ -83,7 +83,7 @@ export abstract class IncrementalParser extends Parser
 		// See if we have seen this state before at this starting point.
 		let existingCtx = this.parseData.tryGetContext(
 			parentCtx ? parentCtx.depth() + 1 : 1,
-			this.state,
+			state,
 			ruleIndex,
 			this._input.LT(1).tokenIndex,
 		);
@@ -152,7 +152,7 @@ export abstract class IncrementalParser extends Parser
 		// During rule entry, we push a new min/max token state.
 		this.pushCurrentTokenToMinMax();
 		let incCtx = ctx as IncrementalParserRuleContext;
-		incCtx.epoch = IncrementalParser.PARSER_EPOCH;
+		incCtx.epoch = this.parserEpoch;
 	}
 	public exitEveryRule(ctx: ParserRuleContext) {
 		// On exit, we need to merge the min max into the current context,
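
The net effect of these hunks is that each parser instance now captures its own epoch at construction time and stamps it onto every rule context it enters, instead of reading the shared static counter at stamping time. Below is a minimal TypeScript sketch of how a caller could use that, assuming IncrementalParserRuleContext exposes the public epoch field written above and lives next to IncrementalParser in src/; the import paths and the helper itself are illustrative, not code from this commit.

import { IncrementalParser } from "./IncrementalParser";
import { IncrementalParserRuleContext } from "./IncrementalParserRuleContext";

// Illustrative helper: a context stamped with an older epoch was built by an
// earlier parser instance, since each instance takes a fresh value from the
// shared _GLOBAL_PARSER_EPOCH counter in its constructor.
function builtByThisParse(
	parser: IncrementalParser,
	ctx: IncrementalParserRuleContext,
): boolean {
	return ctx.epoch === parser.parserEpoch;
}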
19 changes: 6 additions & 13 deletions src/IncrementalParserData.ts
@@ -89,14 +89,6 @@ export interface TokenChange {
 	newToken?: CommonToken;
 }
 
-// Unfortunately we have to make this public in order to be able to use it
-// from IncrementalParserData properly.
-// We do this in a type safe way however.
-class SyncableTokenStream extends IncrementalTokenStream {
-	public sync(i: number): boolean {
-		return super.sync(i);
-	}
-}
 /**
  *
  * This class computes and stores data needed by the incremental parser.
@@ -270,9 +262,9 @@ export class IncrementalParserData {
 		depth: number,
 		state: number,
 		rule: number,
-		tokenindex: number,
+		tokenIndex: number,
 	) {
-		return `${depth},${rule},${tokenindex}`;
+		return `${depth},${rule},${tokenIndex}`;
 	}
 	/**
 	 * Index a given parse tree and adjust the min/max ranges
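
For reference, the cache key built here is just the comma-joined depth, rule index, and starting token index; the state parameter is accepted but, as the diff shows, not folded into the key. A minimal sketch of a lookup shaped the same way follows; the map, helper names, and import path are illustrative, not identifiers from the repository.

import { IncrementalParserRuleContext } from "./IncrementalParserRuleContext";

const ruleCache = new Map<string, IncrementalParserRuleContext>();

// Same key shape as above: depth, rule, and starting token index identify a
// previously parsed rule invocation; the state argument is not part of the key.
function cacheKey(depth: number, rule: number, tokenIndex: number): string {
	return `${depth},${rule},${tokenIndex}`;
}

function tryReuse(
	depth: number,
	rule: number,
	tokenIndex: number,
): IncrementalParserRuleContext | undefined {
	return ruleCache.get(cacheKey(depth, rule, tokenIndex));
}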
@@ -283,6 +275,7 @@
 		// could walk the old parse tree as the parse proceeds. This is left as
 		// a future optimization. We also could just allow passing in
 		// constructed maps if this turns out to be slow.
+		this.tokenStream.fill();
 		let listener = new IncrementalParserData.ParseTreeProcessor(this);
 		ParseTreeWalker.DEFAULT.walk(listener, tree);
 	}
@@ -296,12 +289,12 @@
 	 */
 	class ParseTreeProcessor implements ParseTreeListener {
 		private incrementalData: IncrementalParserData;
-		private tokenStream: SyncableTokenStream;
+		private tokenStream: IncrementalTokenStream;
 		private tokenOffsets: TokenOffsetRange[];
 		private ruleStartMap: Map<string, IncrementalParserRuleContext>;
 		constructor(incrementalData: IncrementalParserData) {
 			this.incrementalData = incrementalData;
-			this.tokenStream = incrementalData.tokenStream as SyncableTokenStream;
+			this.tokenStream = incrementalData.tokenStream;
 			this.tokenOffsets = incrementalData.tokenOffsets;
 			this.ruleStartMap = incrementalData.ruleStartMap;
 		}
@@ -319,7 +312,7 @@
 			);
 			if (newTokenIndex !== undefined) {
 				let syncableStream = this.tokenStream;
-				syncableStream.sync(newTokenIndex);
+				// We filled the stream before the walk
 				return syncableStream.get(newTokenIndex);
 			}
 			return undefined;
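
Taken together, the IncrementalParserData.ts hunks remove the SyncableTokenStream subclass, which existed only to expose the protected sync() call, and instead fill the token stream once before the tree walk so that plain get() lookups are safe. A rough sketch of that pattern follows, assuming IncrementalTokenStream inherits fill(), size, and get() from antlr4ts's BufferedTokenStream (as the diff implies); the helper and import path are illustrative.

import { Token } from "antlr4ts";
import { IncrementalTokenStream } from "./IncrementalTokenStream";

// fill() drains the lexer to EOF (and is cheap to call again afterwards),
// so any in-range index can then be served by a plain get() without the
// protected sync() call the deleted subclass existed to expose.
function tokenAt(stream: IncrementalTokenStream, index: number): Token | undefined {
	stream.fill();
	return index < stream.size ? stream.get(index) : undefined;
}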

0 comments on commit de17e6b
