Mirror of https://github.com/Wack0/IFPSTools.NET.git, synced 2025-06-18 10:45:36 -04:00
cc: add timer for each stage (via compile time config)
cc: optimise lexer for speed (~10x speed improvement)
cc: implement OperatorParser class as per TODO in original codebase (seems refactoring chains of parser-combinators leads to at least SOME speed improvement)
This commit is contained in:
parent b20d5d88b5
commit adc9a2dde1
@@ -8,30 +8,49 @@ using Parsing;
 
 namespace Driver {
     public class Compiler {
+        private static readonly bool SHOW_TIME = false;
+        private static System.Diagnostics.Stopwatch StartTimer()
+        {
+            var watch = new System.Diagnostics.Stopwatch();
+            watch.Start();
+            return watch;
+        }
         private Compiler(String source) {
             this.Source = source;
 
             // Lexical analysis
+            var watch = StartTimer();
             Scanner scanner = new Scanner(source);
             this.Tokens = scanner.Tokens.ToImmutableList();
+            watch.Stop();
+            if (SHOW_TIME) Console.WriteLine("lexer: {0} ms", watch.ElapsedMilliseconds);
 
             // Parse
+            watch = StartTimer();
             var parserResult = CParsers.Parse(this.Tokens);
+            watch.Stop();
+            if (SHOW_TIME) Console.WriteLine("parser: {0} ms", watch.ElapsedMilliseconds);
             if (!parserResult.IsSuccessful || parserResult.Source.Count() != 1) {
                 throw new InvalidOperationException($"Parsing error:\n{parserResult}");
             }
             this.SyntaxTree = parserResult.Result;
 
             // Semantic analysis
+            watch = StartTimer();
             var semantReturn = this.SyntaxTree.GetTranslnUnit();
+            watch.Stop();
+            if (SHOW_TIME) Console.WriteLine("semant: {0} ms", watch.ElapsedMilliseconds);
             this.AbstractSyntaxTree = semantReturn.Value;
             this.Environment = semantReturn.Env;
 
             // Code generation
+            watch = StartTimer();
             var state = new CGenState();
             this.AbstractSyntaxTree.CodeGenerate(state);
             state.EmitCallsToCtor();
             this.Script = state.Script;
+            watch.Stop();
+            if (SHOW_TIME) Console.WriteLine("codegen: {0} ms", watch.ElapsedMilliseconds);
         }
 
         public static Compiler FromSource(String src) {
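The timers above are gated by the SHOW_TIME field, so turning them on means editing that constant and rebuilding, which is what the commit message calls compile-time config. A minimal alternative sketch of the same per-stage pattern, assuming a hypothetical TIME_STAGES preprocessor symbol and Timed helper that are not part of this commit:

    using System;
    using System.Diagnostics;

    static class StageTimer {
        // Runs one compiler stage under a Stopwatch; the timing report is only
        // emitted when TIME_STAGES is defined at build time.
        public static T Timed<T>(string stage, Func<T> run) {
            var watch = Stopwatch.StartNew();
            T result = run();
            watch.Stop();
    #if TIME_STAGES
            Console.WriteLine("{0}: {1} ms", stage, watch.ElapsedMilliseconds);
    #endif
            return result;
        }
    }

    // Hypothetical usage, mirroring the lexer stage above:
    // this.Tokens = StageTimer.Timed("lexer", () => new Scanner(source).Tokens.ToImmutableList());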
@@ -7,8 +7,10 @@ using static Parsing.ParserCombinator;
 using System.Collections.Immutable;
 
 namespace Parsing {
-    public partial class CParsers {
-        static CParsers() {
+    public partial class CParsers
+    {
+        static CParsers()
+        {
             SetExpressionRules();
             SetDeclarationRules();
             SetExternalDefinitionRules();
@@ -18,44 +20,56 @@ namespace Parsing {
         public static IParserResult<TranslnUnit> Parse(IEnumerable<Token> tokens) =>
             TranslationUnit.Parse(new ParserInput(new ParserEnvironment(), tokens));
 
-        public class ConstCharParser : IParser<Expr> {
+        public class ConstCharParser : IParser<Expr>
+        {
             public RuleCombining Combining => RuleCombining.NONE;
-            public IParserResult<Expr> Parse(ParserInput input) {
+            public IParserResult<Expr> Parse(ParserInput input)
+            {
                 var token = input.Source.First() as TokenCharConst;
-                if (token == null) {
+                if (token == null)
+                {
                     return new ParserFailed<Expr>(input);
                 }
                 return ParserSucceeded.Create(new IntLiteral(token.Value, TokenInt.IntSuffix.NONE), input.Environment, input.Source.Skip(1));
             }
         }
 
-        public class ConstIntParser : IParser<Expr> {
+        public class ConstIntParser : IParser<Expr>
+        {
             public RuleCombining Combining => RuleCombining.NONE;
-            public IParserResult<Expr> Parse(ParserInput input) {
+            public IParserResult<Expr> Parse(ParserInput input)
+            {
                 var token = input.Source.First() as TokenInt;
-                if (token == null) {
+                if (token == null)
+                {
                     return new ParserFailed<Expr>(input);
                 }
                 return ParserSucceeded.Create(new IntLiteral(token.Val, token.Suffix), input.Environment, input.Source.Skip(1));
             }
         }
 
-        public class ConstFloatParser : IParser<Expr> {
+        public class ConstFloatParser : IParser<Expr>
+        {
             public RuleCombining Combining => RuleCombining.NONE;
-            public IParserResult<Expr> Parse(ParserInput input) {
+            public IParserResult<Expr> Parse(ParserInput input)
+            {
                 var token = input.Source.First() as TokenFloat;
-                if (token == null) {
+                if (token == null)
+                {
                     return new ParserFailed<Expr>(input);
                 }
                 return ParserSucceeded.Create(new FloatLiteral(token.Value, token.Suffix), input.Environment, input.Source.Skip(1));
             }
         }
 
-        public class StringLiteralParser : IParser<Expr> {
+        public class StringLiteralParser : IParser<Expr>
+        {
             public RuleCombining Combining => RuleCombining.NONE;
-            public IParserResult<Expr> Parse(ParserInput input) {
+            public IParserResult<Expr> Parse(ParserInput input)
+            {
                 var token = input.Source.First() as TokenString;
-                if (token == null) {
+                if (token == null)
+                {
                     return new ParserFailed<Expr>(input);
                 }
                 return ParserSucceeded.Create(new StringLiteral(token.Val), input.Environment, input.Source.Skip(1));
@@ -76,8 +90,10 @@ namespace Parsing {
             }
         }
 
-        public class BinaryOperatorBuilder {
-            public BinaryOperatorBuilder(IConsumer operatorConsumer, Func<Expr, Expr, Expr> nodeCreator) {
+        public class BinaryOperatorBuilder
+        {
+            public BinaryOperatorBuilder(IConsumer operatorConsumer, Func<Expr, Expr, Expr> nodeCreator)
+            {
                 this.OperatorConsumer = operatorConsumer;
                 this.NodeCreator = nodeCreator;
             }
@@ -89,29 +105,93 @@ namespace Parsing {
            public Func<Expr, Expr, Expr> NodeCreator { get; }
        }
 
-        // TODO: create a dedicated class for this.
-        public static IParser<Expr> BinaryOperator(IParser<Expr> operandParser, params BinaryOperatorBuilder[] builders) {
-            ImmutableList<ITransformer<Expr, Expr>> transformers = builders.Select(builder =>
-                Given<Expr>()
-                .Then(builder.OperatorConsumer)
-                .Then(operandParser)
-                .Then(builder.NodeCreator)
-            ).ToImmutableList();
-            return operandParser.Then((new OrTransformer<Expr, Expr>(transformers)).ZeroOrMore());
-        }
+        public class OperatorParser : IParser<Expr>
+        {
+            private IParser<Expr> lhsParser;
+            private IParser<Expr> rhsParser;
+            private readonly ImmutableList<BinaryOperatorBuilder> builders;
+            private readonly bool needsOne;
+
+            public OperatorParser(IParser<Expr> operandParser, IEnumerable<BinaryOperatorBuilder> builders) : this(operandParser, operandParser, builders)
+            {
+                needsOne = false;
+            }
+
+            public OperatorParser(IParser<Expr> lhsParser, IParser<Expr> rhsParser, IEnumerable<BinaryOperatorBuilder> builders)
+            {
+                this.lhsParser = lhsParser;
+                this.rhsParser = rhsParser;
+                this.builders = builders.ToImmutableList();
+                needsOne = true;
+            }
+
+            public RuleCombining Combining => RuleCombining.THEN;
+
+            public IParserResult<Expr> Parse(ParserInput input)
+            {
+                var firstResult = lhsParser.Parse(input);
+                if (!firstResult.IsSuccessful)
+                {
+                    return new ParserFailed<Expr>(firstResult);
+                }
+
+                return Transform(firstResult.Result, firstResult.ToInput());
+            }
+
+            private IParserResult<Expr> TransformImpl(Expr seed, ParserInput input)
+            {
+                List<IParserFailed> failed = new List<IParserFailed>();
+                foreach (var builder in builders) {
+                    var given = ParserSucceeded.Create(seed, input.Environment, input.Source);
+                    var result1 = builder.OperatorConsumer.Consume(given.ToInput());
+                    if (!result1.IsSuccessful)
+                    {
+                        failed.Add(new ParserFailed<Expr>(result1));
+                        continue;
+                    }
+                    var result2 = rhsParser.Parse(result1.ToInput());
+                    if (!result2.IsSuccessful)
+                    {
+                        failed.Add(new ParserFailed<Expr>(result2));
+                        continue;
+                    }
+
+                    var transform = builder.NodeCreator(seed, result2.Result);
+                    var ret = ParserSucceeded.Create(transform, result2.Environment, result2.Source);
+                    var expr = transform as IStoredLineInfo;
+                    if (expr != null)
+                    {
+                        expr.Copy(ret);
+                    }
+                    return ret;
+                }
+                return new ParserFailed<Expr>(input, failed);
+            }
+
+            public IParserResult<Expr> Transform(Expr seed, ParserInput input)
+            {
+                IParserResult<Expr> curResult = needsOne ? TransformImpl(seed, input) : ParserSucceeded.Create(seed, input.Environment, input.Source);
+
+                if (!curResult.IsSuccessful) return new ParserFailed<Expr>(curResult);
+
+                IParserResult<Expr> lastSuccessfulResult;
+                do
+                {
+                    lastSuccessfulResult = curResult;
+                    curResult = TransformImpl(lastSuccessfulResult.Result, lastSuccessfulResult.ToInput());
+                } while (curResult.IsSuccessful);
+
+                return lastSuccessfulResult;
+            }
+        }
+
+        public static IParser<Expr> BinaryOperator(IParser<Expr> operandParser, params BinaryOperatorBuilder[] builders)
+            => new OperatorParser(operandParser, builders);
 
         public static IParser<Expr> AssignmentOperator(
             IParser<Expr> lhsParser,
             IParser<Expr> rhsParser,
             params BinaryOperatorBuilder[] builders
-        ) {
-            var transformers = builders.Select(builder =>
-                Given<Expr>()
-                .Then(builder.OperatorConsumer)
-                .Then(rhsParser)
-                .Then(builder.NodeCreator)
-            ).ToImmutableList();
-            return lhsParser.Then((new OrTransformer<Expr, Expr>(transformers)).OneOrMore());
-        }
+        ) => new OperatorParser(lhsParser, rhsParser, builders);
     }
 }
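For each precedence level, OperatorParser replaces a chain of Given/Then transformer combinators with a single object: Parse reads one operand via lhsParser, then Transform repeatedly calls TransformImpl, which tries each builder's operator consumer followed by rhsParser and folds the new node back into the seed, so an expression like 10 - 4 - 3 still associates as (10 - 4) - 3. A toy stand-alone illustration of that left fold (plain delegates, not the repo's parser types):

    using System;
    using System.Collections.Generic;

    // Toy version of one precedence level: parse one operand, then repeatedly
    // try "operator, operand" and fold each match into the running result.
    class LeftFoldDemo {
        static void Main() {
            var tokens = new Queue<string>(new[] { "10", "-", "4", "-", "3" });

            Func<int> operand = () => int.Parse(tokens.Dequeue());
            var builders = new Dictionary<string, Func<int, int, int>> {
                ["-"] = (l, r) => l - r,
                ["+"] = (l, r) => l + r,
            };

            int seed = operand();                      // lhsParser
            while (tokens.Count > 0 && builders.ContainsKey(tokens.Peek())) {
                var op = tokens.Dequeue();             // builder.OperatorConsumer
                int rhs = operand();                   // rhsParser
                seed = builders[op](seed, rhs);        // builder.NodeCreator folds into the seed
            }

            Console.WriteLine(seed);                   // 3, i.e. (10 - 4) - 3, not 10 - (4 - 3)
        }
    }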
@@ -1,4 +1,5 @@
 using System;
+using System.Text;
 
 namespace LexicalAnalysis {
     /// <summary>
@@ -61,7 +62,7 @@ namespace LexicalAnalysis {
         }
 
         private State _state;
-        private String _scanned;
+        private StringBuilder _scanned;
 
         // quote : Char
         // ============
@@ -71,11 +72,11 @@ namespace LexicalAnalysis {
         public FSAChar(Char quote) {
             this._state = State.START;
             this._quote = quote;
-            this._scanned = "";
+            this._scanned = new StringBuilder();
         }
 
         public override void Reset() {
-            this._scanned = "";
+            this._scanned.Clear();
             this._state = State.START;
         }
 
@@ -106,7 +107,7 @@
         // ==========================
         //
         public String RetrieveRaw() {
-            return this._scanned.Substring(0, this._scanned.Length - 1);
+            return this._scanned.ToString(0, this._scanned.Length - 1);
         }
 
         // RetrieveChar : () -> Char
@@ -157,7 +158,7 @@
         // Implementation of the FSA
         //
         public override void ReadChar(Char ch) {
-            this._scanned = this._scanned + ch;
+            this._scanned = this._scanned.Append(ch);
             switch (this._state) {
                 case State.END:
                 case State.ERROR:
@@ -230,7 +231,7 @@
        // ==================
        //
        public override void ReadEOF() {
-            this._scanned = this._scanned + '0';
+            this._scanned = this._scanned.Append('0');
            switch (this._state) {
                case State.C:
                case State.SO:
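The same String-to-StringBuilder change repeats in every FSA class below and is the likely source of most of the claimed ~10x lexer speedup: ReadChar runs once per input character, and `_scanned = _scanned + ch` copies the whole accumulated lexeme on every call, so scanning a token is quadratic in its length, while Append writes into a growable buffer in amortised constant time. A stand-alone comparison (illustrative, not code from the repo):

    using System;
    using System.Diagnostics;
    using System.Text;

    class ConcatVsAppend {
        static void Main() {
            const int n = 200_000;

            var sw = Stopwatch.StartNew();
            string s = "";
            for (int i = 0; i < n; i++) s += 'x';        // copies i chars on iteration i
            sw.Stop();
            Console.WriteLine("string +=     : {0} ms, length {1}", sw.ElapsedMilliseconds, s.Length);

            sw.Restart();
            var sb = new StringBuilder();
            for (int i = 0; i < n; i++) sb.Append('x');  // amortised O(1) per character
            sw.Stop();
            Console.WriteLine("StringBuilder : {0} ms, length {1}", sw.ElapsedMilliseconds, sb.Length);
        }
    }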
@@ -1,4 +1,5 @@
 using System;
+using System.Text;
 
 namespace LexicalAnalysis {
     /// <summary>
@@ -60,7 +61,7 @@ namespace LexicalAnalysis {
             DPL
         };
 
-        private String _raw;
+        private StringBuilder _raw;
         private Int64 _intPart;
         private Int64 _fracPart;
         private Int64 _fracCount;
@@ -77,7 +78,7 @@ namespace LexicalAnalysis {
             this._expPart = 0;
             this._suffix = TokenFloat.FloatSuffix.NONE;
             this._expPos = true;
-            this._raw = "";
+            this._raw = new StringBuilder();
         }
 
         public override void Reset() {
@@ -88,7 +89,7 @@ namespace LexicalAnalysis {
             this._expPart = 0;
             this._suffix = TokenFloat.FloatSuffix.NONE;
             this._expPos = true;
-            this._raw = "";
+            this._raw.Clear();
         }
 
         public override FSAStatus GetStatus() {
@@ -111,11 +112,11 @@ namespace LexicalAnalysis {
             } else {
                 val = (this._intPart + this._fracPart * Math.Pow(0.1, this._fracCount)) * Math.Pow(10, -this._expPart);
             }
-            return new TokenFloat(val, this._suffix, this._raw.Substring(0, this._raw.Length - 1));
+            return new TokenFloat(val, this._suffix, this._raw.ToString(0, this._raw.Length - 1));
         }
 
         public override void ReadChar(Char ch) {
-            this._raw += ch;
+            this._raw.Append(ch);
             switch (this._state) {
                 case State.ERROR:
                 case State.END:
@@ -1,4 +1,6 @@
 using System;
+using System.Linq;
+using System.Text;
 
 namespace LexicalAnalysis {
     /// <summary>
@@ -24,16 +26,16 @@ namespace LexicalAnalysis {
             ID
         };
         private State _state;
-        private String _scanned;
+        private StringBuilder _scanned;
 
         public FSAIdentifier() {
             this._state = State.START;
-            this._scanned = "";
+            this._scanned = new StringBuilder();
        }
 
        public override void Reset() {
            this._state = State.START;
-            this._scanned = "";
+            this._scanned.Clear();
        }
 
        public override FSAStatus GetStatus() {
@@ -50,7 +52,7 @@ namespace LexicalAnalysis {
        }
 
        public override Token RetrieveToken() {
-            String name = this._scanned.Substring(0, this._scanned.Length - 1);
+            String name = this._scanned.ToString(0, this._scanned.Length - 1);
            if (TokenKeyword.Keywords.ContainsKey(name)) {
                return new TokenKeyword(TokenKeyword.Keywords[name]);
            }
@@ -58,7 +60,7 @@ namespace LexicalAnalysis {
        }
 
        public override void ReadChar(Char ch) {
-            this._scanned = this._scanned + ch;
+            this._scanned = this._scanned.Append(ch);
            switch (this._state) {
                case State.END:
                case State.ERROR:
@@ -82,7 +84,7 @@ namespace LexicalAnalysis {
        }
 
        public override void ReadEOF() {
-            this._scanned = this._scanned + '0';
+            this._scanned = this._scanned.Append('0');
            switch (this._state) {
                case State.ID:
                    this._state = State.END;
@@ -1,4 +1,5 @@
 using System;
+using System.Text;
 
 namespace LexicalAnalysis {
     /// <summary>
@@ -69,21 +70,21 @@ namespace LexicalAnalysis {
         };
 
         private Int64 _val;
-        private String _raw;
+        private StringBuilder _raw;
         private TokenInt.IntSuffix _suffix;
         private State _state;
 
         public FSAInt() {
             this._state = State.START;
             this._val = 0;
-            this._raw = "";
+            this._raw = new StringBuilder();
             this._suffix = TokenInt.IntSuffix.NONE;
         }
 
         public override void Reset() {
             this._state = State.START;
             this._val = 0;
-            this._raw = "";
+            this._raw.Clear();
             this._suffix = TokenInt.IntSuffix.NONE;
         }
 
@@ -101,11 +102,11 @@ namespace LexicalAnalysis {
         }
 
         public override Token RetrieveToken() {
-            return new TokenInt(this._val, this._suffix, this._raw.Substring(0, this._raw.Length - 1));
+            return new TokenInt(this._val, this._suffix, this._raw.ToString(0, this._raw.Length - 1));
         }
 
         public override void ReadChar(Char ch) {
-            this._raw += ch;
+            this._raw.Append(ch);
             switch (this._state) {
                 case State.ERROR:
                 case State.END:
@@ -2,6 +2,7 @@
 using System.Collections.Generic;
 using System.Collections.Immutable;
 using System.Linq;
+using System.Text;
 
 namespace LexicalAnalysis {
     /// <summary>
@@ -166,16 +167,16 @@ namespace LexicalAnalysis {
             );
 
         private State _state;
-        private String _scanned;
+        private StringBuilder _scanned;
 
         public FSAOperator() {
             this._state = State.START;
-            this._scanned = "";
+            this._scanned = new StringBuilder();
         }
 
         public override sealed void Reset() {
             this._state = State.START;
-            this._scanned = "";
+            this._scanned.Clear();
         }
 
         public override sealed FSAStatus GetStatus() {
@@ -192,11 +193,11 @@ namespace LexicalAnalysis {
         }
 
         public override sealed Token RetrieveToken() {
-            return new TokenOperator(TokenOperator.Operators[this._scanned.Substring(0, this._scanned.Length - 1)]);
+            return new TokenOperator(TokenOperator.Operators[this._scanned.ToString(0, this._scanned.Length - 1)]);
         }
 
         public override sealed void ReadChar(Char ch) {
-            this._scanned = this._scanned + ch;
+            this._scanned = this._scanned.Append(ch);
             switch (this._state) {
                 case State.END:
                 case State.ERROR:
@@ -386,7 +387,7 @@ namespace LexicalAnalysis {
         }
 
         public override sealed void ReadEOF() {
-            this._scanned = this._scanned + '0';
+            this._scanned = this._scanned.Append('0');
             switch (this._state) {
                 case State.FINISH:
                 case State.SUB:
@@ -36,21 +36,31 @@ namespace LexicalAnalysis {
         private IEnumerable<Token> Lex() {
             var tokens = new List<Token>();
             int line = 1, column = 1, lastColumn = column;
+            char lastChr = '\0';
             for (Int32 i = 0; i < this.Source.Length; ++i) {
-                if (i > 0 && this.Source[i - 1] == '\n')
+                if (lastChr == '\n')
                 {
                     line++;
                     lastColumn = 1;
                     column = 1;
                 }
                 else column++;
-                this.FSAs.ForEach(fsa => fsa.ReadChar(this.Source[i]));
+                bool isRunning = false;
+                int endIdx = -1;
+                var chr = Source[i];
+                for (int fsaIdx = 0; fsaIdx < FSAs.Count; fsaIdx++)
+                {
+                    var fsa = FSAs[fsaIdx];
+                    fsa.ReadChar(chr);
+                    var status = fsa.GetStatus();
+                    if (status == FSAStatus.RUNNING) isRunning = true;
+                    else if (endIdx == -1 && status == FSAStatus.END) endIdx = fsaIdx;
+                }
 
                 // if no running
-                if (this.FSAs.FindIndex(fsa => fsa.GetStatus() == FSAStatus.RUNNING) == -1) {
-                    Int32 idx = this.FSAs.FindIndex(fsa => fsa.GetStatus() == FSAStatus.END);
-                    if (idx != -1) {
-                        Token token = this.FSAs[idx].RetrieveToken();
+                if (!isRunning) {
+                    if (endIdx != -1) {
+                        Token token = this.FSAs[endIdx].RetrieveToken();
                         if (token.Kind != TokenKind.NONE) {
                             token.Line = line;
                             token.Column = lastColumn;
@@ -58,19 +68,26 @@ namespace LexicalAnalysis {
                             tokens.Add(token);
                         }
                         i--; column--;
-                        if (this.Source[i] == '\n') line--;
-                        this.FSAs.ForEach(fsa => fsa.Reset());
+                        if (lastChr == '\n') line--;
+                        foreach (var fsa in FSAs) fsa.Reset();
                     } else {
                         Console.WriteLine("error");
                     }
                 }
+                if (!isRunning || endIdx == -1) lastChr = chr;
             }
 
-            this.FSAs.ForEach(fsa => fsa.ReadEOF());
+            var endIdx2 = -1;
+            for (int fsaIdx = 0; fsaIdx < FSAs.Count; fsaIdx++)
+            {
+                var fsa = FSAs[fsaIdx];
+                fsa.ReadEOF();
+                if (endIdx2 != -1) continue;
+                if (fsa.GetStatus() == FSAStatus.END) endIdx2 = fsaIdx;
+            }
             // find END
-            Int32 idx2 = this.FSAs.FindIndex(fsa => fsa.GetStatus() == FSAStatus.END);
-            if (idx2 != -1) {
-                Token token = this.FSAs[idx2].RetrieveToken();
+            if (endIdx2 != -1) {
+                Token token = this.FSAs[endIdx2].RetrieveToken();
                 if (token.Kind != TokenKind.NONE) {
                     token.Line = line;
                     token.Column = column + 1;
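Behaviourally the rewritten Lex() is the same maximal-munch scanner: every automaton sees every character, and once none is running the first automaton that reached END supplies the token and the current character is re-read against freshly reset automata. What changed is the cost per character: one indexed pass now replaces a ForEach plus two LINQ FindIndex scans (and their per-character delegate allocations), and lastChr replaces re-indexing Source[i - 1] for line tracking. A self-contained toy of that driver loop, with hypothetical MiniFsa/FsaStatus types that only gesture at the repo's FSA classes:

    using System;
    using System.Collections.Generic;

    enum FsaStatus { RUNNING, END, ERROR }

    // Toy automaton with the same one-character-lookahead convention as the repo:
    // END means "the text before the most recent character was a complete token".
    class MiniFsa {
        private readonly string kind;
        private readonly Func<char, bool> accepts;
        private string scanned = "";
        public MiniFsa(string kind, Func<char, bool> accepts) { this.kind = kind; this.accepts = accepts; }
        public void Reset() => scanned = "";
        public void ReadChar(char c) => scanned += c;
        public FsaStatus GetStatus() {
            int n = scanned.Length;
            for (int i = 0; i < n - 1; i++) if (!accepts(scanned[i])) return FsaStatus.ERROR;
            if (n == 0 || accepts(scanned[n - 1])) return FsaStatus.RUNNING;
            return n > 1 ? FsaStatus.END : FsaStatus.ERROR;
        }
        public string RetrieveToken() => kind + "(" + scanned.Substring(0, scanned.Length - 1) + ")";
    }

    class ToyLexer {
        static void Main() {
            var source = "foo 42 bar\0";                     // '\0' plays the role of ReadEOF
            var fsas = new List<MiniFsa> {
                new MiniFsa("ident", char.IsLetter),
                new MiniFsa("int", char.IsDigit),
                new MiniFsa("space", c => c == ' '),
            };
            for (int i = 0; i < source.Length; i++) {
                // Single pass per character: feed every automaton and record both
                // "is anything still running?" and "which automaton ended first?".
                bool isRunning = false;
                int endIdx = -1;
                for (int f = 0; f < fsas.Count; f++) {
                    fsas[f].ReadChar(source[i]);
                    var status = fsas[f].GetStatus();
                    if (status == FsaStatus.RUNNING) isRunning = true;
                    else if (endIdx == -1 && status == FsaStatus.END) endIdx = f;
                }
                if (!isRunning && endIdx != -1) {
                    Console.WriteLine(fsas[endIdx].RetrieveToken());
                    i--;                                      // maximal munch: re-read this character
                    foreach (var fsa in fsas) fsa.Reset();
                }
            }
        }
    }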
@@ -1,4 +1,5 @@
 using System;
+using System.Text;
 
 namespace LexicalAnalysis {
     /// <summary>
@@ -54,22 +55,22 @@ namespace LexicalAnalysis {
 
         private State _state;
         private readonly FSAChar _fsachar;
-        private String _val;
-        private String _raw;
+        private StringBuilder _val;
+        private StringBuilder _raw;
         private bool unicode = false;
 
         public FSAString() {
             this._state = State.START;
             this._fsachar = new FSAChar('\"');
-            this._raw = "";
-            this._val = "";
+            this._raw = new StringBuilder();
+            this._val = new StringBuilder();
         }
 
         public override void Reset() {
             this._state = State.START;
             this._fsachar.Reset();
-            this._raw = "";
-            this._val = "";
+            this._raw.Clear();
+            this._val.Clear();
             unicode = false;
         }
 
@@ -87,8 +88,8 @@ namespace LexicalAnalysis {
         }
 
         public override Token RetrieveToken() {
-            if (unicode) return new TokenUnicodeString(this._val, this._raw);
-            return new TokenString(this._val, this._raw);
+            if (unicode) return new TokenUnicodeString(this._val.ToString(), this._raw.ToString());
+            return new TokenString(this._val.ToString(), this._raw.ToString());
         }
 
         public override void ReadChar(Char ch) {
@@ -129,8 +130,8 @@ namespace LexicalAnalysis {
             switch (this._fsachar.GetStatus()) {
                 case FSAStatus.END:
                     this._state = State.Q;
-                    this._val = this._val + this._fsachar.RetrieveChar();
-                    this._raw = this._raw + this._fsachar.RetrieveRaw();
+                    this._val.Append(this._fsachar.RetrieveChar());
+                    this._raw.Append(this._fsachar.RetrieveRaw());
                     this._fsachar.Reset();
                     ReadChar(ch);
                     break;