cc: add timer for each stage (via compile-time config)

cc: optimise lexer for speed (~10x speed improvement)
cc: implement OperatorParser class as per TODO in original codebase (it seems that refactoring chains of parser combinators leads to at least SOME speed improvement)
This commit is contained in:
zc 2023-07-11 17:57:57 +01:00
parent b20d5d88b5
commit adc9a2dde1
9 changed files with 207 additions and 84 deletions

View File

@ -8,30 +8,49 @@ using Parsing;
namespace Driver { namespace Driver {
public class Compiler { public class Compiler {
private static readonly bool SHOW_TIME = false;
private static System.Diagnostics.Stopwatch StartTimer()
{
var watch = new System.Diagnostics.Stopwatch();
watch.Start();
return watch;
}
private Compiler(String source) { private Compiler(String source) {
this.Source = source; this.Source = source;
// Lexical analysis // Lexical analysis
var watch = StartTimer();
Scanner scanner = new Scanner(source); Scanner scanner = new Scanner(source);
this.Tokens = scanner.Tokens.ToImmutableList(); this.Tokens = scanner.Tokens.ToImmutableList();
watch.Stop();
if (SHOW_TIME) Console.WriteLine("lexer: {0} ms", watch.ElapsedMilliseconds);
// Parse // Parse
watch = StartTimer();
var parserResult = CParsers.Parse(this.Tokens); var parserResult = CParsers.Parse(this.Tokens);
watch.Stop();
if (SHOW_TIME) Console.WriteLine("parser: {0} ms", watch.ElapsedMilliseconds);
if (!parserResult.IsSuccessful || parserResult.Source.Count() != 1) { if (!parserResult.IsSuccessful || parserResult.Source.Count() != 1) {
throw new InvalidOperationException($"Parsing error:\n{parserResult}"); throw new InvalidOperationException($"Parsing error:\n{parserResult}");
} }
this.SyntaxTree = parserResult.Result; this.SyntaxTree = parserResult.Result;
// Semantic analysis // Semantic analysis
watch = StartTimer();
var semantReturn = this.SyntaxTree.GetTranslnUnit(); var semantReturn = this.SyntaxTree.GetTranslnUnit();
watch.Stop();
if (SHOW_TIME) Console.WriteLine("semant: {0} ms", watch.ElapsedMilliseconds);
this.AbstractSyntaxTree = semantReturn.Value; this.AbstractSyntaxTree = semantReturn.Value;
this.Environment = semantReturn.Env; this.Environment = semantReturn.Env;
// Code generation // Code generation
watch = StartTimer();
var state = new CGenState(); var state = new CGenState();
this.AbstractSyntaxTree.CodeGenerate(state); this.AbstractSyntaxTree.CodeGenerate(state);
state.EmitCallsToCtor(); state.EmitCallsToCtor();
this.Script = state.Script; this.Script = state.Script;
watch.Stop();
if (SHOW_TIME) Console.WriteLine("codegen: {0} ms", watch.ElapsedMilliseconds);
} }
public static Compiler FromSource(String src) { public static Compiler FromSource(String src) {

View File

@ -7,8 +7,10 @@ using static Parsing.ParserCombinator;
using System.Collections.Immutable; using System.Collections.Immutable;
namespace Parsing { namespace Parsing {
public partial class CParsers { public partial class CParsers
static CParsers() { {
static CParsers()
{
SetExpressionRules(); SetExpressionRules();
SetDeclarationRules(); SetDeclarationRules();
SetExternalDefinitionRules(); SetExternalDefinitionRules();
@ -18,44 +20,56 @@ namespace Parsing {
public static IParserResult<TranslnUnit> Parse(IEnumerable<Token> tokens) => public static IParserResult<TranslnUnit> Parse(IEnumerable<Token> tokens) =>
TranslationUnit.Parse(new ParserInput(new ParserEnvironment(), tokens)); TranslationUnit.Parse(new ParserInput(new ParserEnvironment(), tokens));
public class ConstCharParser : IParser<Expr> { public class ConstCharParser : IParser<Expr>
{
public RuleCombining Combining => RuleCombining.NONE; public RuleCombining Combining => RuleCombining.NONE;
public IParserResult<Expr> Parse(ParserInput input) { public IParserResult<Expr> Parse(ParserInput input)
{
var token = input.Source.First() as TokenCharConst; var token = input.Source.First() as TokenCharConst;
if (token == null) { if (token == null)
{
return new ParserFailed<Expr>(input); return new ParserFailed<Expr>(input);
} }
return ParserSucceeded.Create(new IntLiteral(token.Value, TokenInt.IntSuffix.NONE), input.Environment, input.Source.Skip(1)); return ParserSucceeded.Create(new IntLiteral(token.Value, TokenInt.IntSuffix.NONE), input.Environment, input.Source.Skip(1));
} }
} }
public class ConstIntParser : IParser<Expr> { public class ConstIntParser : IParser<Expr>
{
public RuleCombining Combining => RuleCombining.NONE; public RuleCombining Combining => RuleCombining.NONE;
public IParserResult<Expr> Parse(ParserInput input) { public IParserResult<Expr> Parse(ParserInput input)
{
var token = input.Source.First() as TokenInt; var token = input.Source.First() as TokenInt;
if (token == null) { if (token == null)
{
return new ParserFailed<Expr>(input); return new ParserFailed<Expr>(input);
} }
return ParserSucceeded.Create(new IntLiteral(token.Val, token.Suffix), input.Environment, input.Source.Skip(1)); return ParserSucceeded.Create(new IntLiteral(token.Val, token.Suffix), input.Environment, input.Source.Skip(1));
} }
} }
public class ConstFloatParser : IParser<Expr> { public class ConstFloatParser : IParser<Expr>
{
public RuleCombining Combining => RuleCombining.NONE; public RuleCombining Combining => RuleCombining.NONE;
public IParserResult<Expr> Parse(ParserInput input) { public IParserResult<Expr> Parse(ParserInput input)
{
var token = input.Source.First() as TokenFloat; var token = input.Source.First() as TokenFloat;
if (token == null) { if (token == null)
{
return new ParserFailed<Expr>(input); return new ParserFailed<Expr>(input);
} }
return ParserSucceeded.Create(new FloatLiteral(token.Value, token.Suffix), input.Environment, input.Source.Skip(1)); return ParserSucceeded.Create(new FloatLiteral(token.Value, token.Suffix), input.Environment, input.Source.Skip(1));
} }
} }
public class StringLiteralParser : IParser<Expr> { public class StringLiteralParser : IParser<Expr>
{
public RuleCombining Combining => RuleCombining.NONE; public RuleCombining Combining => RuleCombining.NONE;
public IParserResult<Expr> Parse(ParserInput input) { public IParserResult<Expr> Parse(ParserInput input)
{
var token = input.Source.First() as TokenString; var token = input.Source.First() as TokenString;
if (token == null) { if (token == null)
{
return new ParserFailed<Expr>(input); return new ParserFailed<Expr>(input);
} }
return ParserSucceeded.Create(new StringLiteral(token.Val), input.Environment, input.Source.Skip(1)); return ParserSucceeded.Create(new StringLiteral(token.Val), input.Environment, input.Source.Skip(1));
@ -76,8 +90,10 @@ namespace Parsing {
} }
} }
public class BinaryOperatorBuilder { public class BinaryOperatorBuilder
public BinaryOperatorBuilder(IConsumer operatorConsumer, Func<Expr, Expr, Expr> nodeCreator) { {
public BinaryOperatorBuilder(IConsumer operatorConsumer, Func<Expr, Expr, Expr> nodeCreator)
{
this.OperatorConsumer = operatorConsumer; this.OperatorConsumer = operatorConsumer;
this.NodeCreator = nodeCreator; this.NodeCreator = nodeCreator;
} }
@ -89,29 +105,93 @@ namespace Parsing {
public Func<Expr, Expr, Expr> NodeCreator { get; } public Func<Expr, Expr, Expr> NodeCreator { get; }
} }
// TODO: create a dedicated class for this. public class OperatorParser : IParser<Expr>
public static IParser<Expr> BinaryOperator(IParser<Expr> operandParser, params BinaryOperatorBuilder[] builders) { {
ImmutableList<ITransformer<Expr, Expr>> transformers = builders.Select(builder => private IParser<Expr> lhsParser;
Given<Expr>() private IParser<Expr> rhsParser;
.Then(builder.OperatorConsumer) private readonly ImmutableList<BinaryOperatorBuilder> builders;
.Then(operandParser) private readonly bool needsOne;
.Then(builder.NodeCreator)
).ToImmutableList(); public OperatorParser(IParser<Expr> operandParser, IEnumerable<BinaryOperatorBuilder> builders) : this(operandParser, operandParser, builders)
return operandParser.Then((new OrTransformer<Expr, Expr>(transformers)).ZeroOrMore()); {
needsOne = false;
}
public OperatorParser(IParser<Expr> lhsParser, IParser<Expr> rhsParser, IEnumerable<BinaryOperatorBuilder> builders)
{
this.lhsParser = lhsParser;
this.rhsParser = rhsParser;
this.builders = builders.ToImmutableList();
needsOne = true;
}
public RuleCombining Combining => RuleCombining.THEN;
public IParserResult<Expr> Parse(ParserInput input)
{
var firstResult = lhsParser.Parse(input);
if (!firstResult.IsSuccessful)
{
return new ParserFailed<Expr>(firstResult);
}
return Transform(firstResult.Result, firstResult.ToInput());
}
private IParserResult<Expr> TransformImpl(Expr seed, ParserInput input)
{
List<IParserFailed> failed = new List<IParserFailed>();
foreach (var builder in builders) {
var given = ParserSucceeded.Create(seed, input.Environment, input.Source);
var result1 = builder.OperatorConsumer.Consume(given.ToInput());
if (!result1.IsSuccessful)
{
failed.Add(new ParserFailed<Expr>(result1));
continue;
}
var result2 = rhsParser.Parse(result1.ToInput());
if (!result2.IsSuccessful)
{
failed.Add(new ParserFailed<Expr>(result2));
continue;
}
var transform = builder.NodeCreator(seed, result2.Result);
var ret = ParserSucceeded.Create(transform, result2.Environment, result2.Source);
var expr = transform as IStoredLineInfo;
if (expr != null)
{
expr.Copy(ret);
}
return ret;
}
return new ParserFailed<Expr>(input, failed);
}
public IParserResult<Expr> Transform(Expr seed, ParserInput input)
{
IParserResult<Expr> curResult = needsOne ? TransformImpl(seed, input) : ParserSucceeded.Create(seed, input.Environment, input.Source);
if (!curResult.IsSuccessful) return new ParserFailed<Expr>(curResult);
IParserResult<Expr> lastSuccessfulResult;
do
{
lastSuccessfulResult = curResult;
curResult = TransformImpl(lastSuccessfulResult.Result, lastSuccessfulResult.ToInput());
} while (curResult.IsSuccessful);
return lastSuccessfulResult;
}
} }
public static IParser<Expr> BinaryOperator(IParser<Expr> operandParser, params BinaryOperatorBuilder[] builders)
=> new OperatorParser(operandParser, builders);
public static IParser<Expr> AssignmentOperator( public static IParser<Expr> AssignmentOperator(
IParser<Expr> lhsParser, IParser<Expr> lhsParser,
IParser<Expr> rhsParser, IParser<Expr> rhsParser,
params BinaryOperatorBuilder[] builders params BinaryOperatorBuilder[] builders
) { ) => new OperatorParser(lhsParser, rhsParser, builders);
var transformers = builders.Select(builder =>
Given<Expr>()
.Then(builder.OperatorConsumer)
.Then(rhsParser)
.Then(builder.NodeCreator)
).ToImmutableList();
return lhsParser.Then((new OrTransformer<Expr, Expr>(transformers)).OneOrMore());
}
} }
} }

View File

@ -1,4 +1,5 @@
using System; using System;
using System.Text;
namespace LexicalAnalysis { namespace LexicalAnalysis {
/// <summary> /// <summary>
@ -61,7 +62,7 @@ namespace LexicalAnalysis {
} }
private State _state; private State _state;
private String _scanned; private StringBuilder _scanned;
// quote : Char // quote : Char
// ============ // ============
@ -71,11 +72,11 @@ namespace LexicalAnalysis {
public FSAChar(Char quote) { public FSAChar(Char quote) {
this._state = State.START; this._state = State.START;
this._quote = quote; this._quote = quote;
this._scanned = ""; this._scanned = new StringBuilder();
} }
public override void Reset() { public override void Reset() {
this._scanned = ""; this._scanned.Clear();
this._state = State.START; this._state = State.START;
} }
@ -106,7 +107,7 @@ namespace LexicalAnalysis {
// ========================== // ==========================
// //
public String RetrieveRaw() { public String RetrieveRaw() {
return this._scanned.Substring(0, this._scanned.Length - 1); return this._scanned.ToString(0, this._scanned.Length - 1);
} }
// RetrieveChar : () -> Char // RetrieveChar : () -> Char
@ -157,7 +158,7 @@ namespace LexicalAnalysis {
// Implementation of the FSA // Implementation of the FSA
// //
public override void ReadChar(Char ch) { public override void ReadChar(Char ch) {
this._scanned = this._scanned + ch; this._scanned = this._scanned.Append(ch);
switch (this._state) { switch (this._state) {
case State.END: case State.END:
case State.ERROR: case State.ERROR:
@ -230,7 +231,7 @@ namespace LexicalAnalysis {
// ================== // ==================
// //
public override void ReadEOF() { public override void ReadEOF() {
this._scanned = this._scanned + '0'; this._scanned = this._scanned.Append('0');
switch (this._state) { switch (this._state) {
case State.C: case State.C:
case State.SO: case State.SO:

View File

@ -1,4 +1,5 @@
using System; using System;
using System.Text;
namespace LexicalAnalysis { namespace LexicalAnalysis {
/// <summary> /// <summary>
@ -60,7 +61,7 @@ namespace LexicalAnalysis {
DPL DPL
}; };
private String _raw; private StringBuilder _raw;
private Int64 _intPart; private Int64 _intPart;
private Int64 _fracPart; private Int64 _fracPart;
private Int64 _fracCount; private Int64 _fracCount;
@ -77,7 +78,7 @@ namespace LexicalAnalysis {
this._expPart = 0; this._expPart = 0;
this._suffix = TokenFloat.FloatSuffix.NONE; this._suffix = TokenFloat.FloatSuffix.NONE;
this._expPos = true; this._expPos = true;
this._raw = ""; this._raw = new StringBuilder();
} }
public override void Reset() { public override void Reset() {
@ -88,7 +89,7 @@ namespace LexicalAnalysis {
this._expPart = 0; this._expPart = 0;
this._suffix = TokenFloat.FloatSuffix.NONE; this._suffix = TokenFloat.FloatSuffix.NONE;
this._expPos = true; this._expPos = true;
this._raw = ""; this._raw.Clear();
} }
public override FSAStatus GetStatus() { public override FSAStatus GetStatus() {
@ -111,11 +112,11 @@ namespace LexicalAnalysis {
} else { } else {
val = (this._intPart + this._fracPart * Math.Pow(0.1, this._fracCount)) * Math.Pow(10, -this._expPart); val = (this._intPart + this._fracPart * Math.Pow(0.1, this._fracCount)) * Math.Pow(10, -this._expPart);
} }
return new TokenFloat(val, this._suffix, this._raw.Substring(0, this._raw.Length - 1)); return new TokenFloat(val, this._suffix, this._raw.ToString(0, this._raw.Length - 1));
} }
public override void ReadChar(Char ch) { public override void ReadChar(Char ch) {
this._raw += ch; this._raw.Append(ch);
switch (this._state) { switch (this._state) {
case State.ERROR: case State.ERROR:
case State.END: case State.END:

View File

@ -1,4 +1,6 @@
using System; using System;
using System.Linq;
using System.Text;
namespace LexicalAnalysis { namespace LexicalAnalysis {
/// <summary> /// <summary>
@ -24,16 +26,16 @@ namespace LexicalAnalysis {
ID ID
}; };
private State _state; private State _state;
private String _scanned; private StringBuilder _scanned;
public FSAIdentifier() { public FSAIdentifier() {
this._state = State.START; this._state = State.START;
this._scanned = ""; this._scanned = new StringBuilder();
} }
public override void Reset() { public override void Reset() {
this._state = State.START; this._state = State.START;
this._scanned = ""; this._scanned.Clear();
} }
public override FSAStatus GetStatus() { public override FSAStatus GetStatus() {
@ -50,7 +52,7 @@ namespace LexicalAnalysis {
} }
public override Token RetrieveToken() { public override Token RetrieveToken() {
String name = this._scanned.Substring(0, this._scanned.Length - 1); String name = this._scanned.ToString(0, this._scanned.Length - 1);
if (TokenKeyword.Keywords.ContainsKey(name)) { if (TokenKeyword.Keywords.ContainsKey(name)) {
return new TokenKeyword(TokenKeyword.Keywords[name]); return new TokenKeyword(TokenKeyword.Keywords[name]);
} }
@ -58,7 +60,7 @@ namespace LexicalAnalysis {
} }
public override void ReadChar(Char ch) { public override void ReadChar(Char ch) {
this._scanned = this._scanned + ch; this._scanned = this._scanned.Append(ch);
switch (this._state) { switch (this._state) {
case State.END: case State.END:
case State.ERROR: case State.ERROR:
@ -82,7 +84,7 @@ namespace LexicalAnalysis {
} }
public override void ReadEOF() { public override void ReadEOF() {
this._scanned = this._scanned + '0'; this._scanned = this._scanned.Append('0');
switch (this._state) { switch (this._state) {
case State.ID: case State.ID:
this._state = State.END; this._state = State.END;

View File

@ -1,4 +1,5 @@
using System; using System;
using System.Text;
namespace LexicalAnalysis { namespace LexicalAnalysis {
/// <summary> /// <summary>
@ -69,21 +70,21 @@ namespace LexicalAnalysis {
}; };
private Int64 _val; private Int64 _val;
private String _raw; private StringBuilder _raw;
private TokenInt.IntSuffix _suffix; private TokenInt.IntSuffix _suffix;
private State _state; private State _state;
public FSAInt() { public FSAInt() {
this._state = State.START; this._state = State.START;
this._val = 0; this._val = 0;
this._raw = ""; this._raw = new StringBuilder();
this._suffix = TokenInt.IntSuffix.NONE; this._suffix = TokenInt.IntSuffix.NONE;
} }
public override void Reset() { public override void Reset() {
this._state = State.START; this._state = State.START;
this._val = 0; this._val = 0;
this._raw = ""; this._raw.Clear();
this._suffix = TokenInt.IntSuffix.NONE; this._suffix = TokenInt.IntSuffix.NONE;
} }
@ -101,11 +102,11 @@ namespace LexicalAnalysis {
} }
public override Token RetrieveToken() { public override Token RetrieveToken() {
return new TokenInt(this._val, this._suffix, this._raw.Substring(0, this._raw.Length - 1)); return new TokenInt(this._val, this._suffix, this._raw.ToString(0, this._raw.Length - 1));
} }
public override void ReadChar(Char ch) { public override void ReadChar(Char ch) {
this._raw += ch; this._raw.Append(ch);
switch (this._state) { switch (this._state) {
case State.ERROR: case State.ERROR:
case State.END: case State.END:

View File

@ -2,6 +2,7 @@
using System.Collections.Generic; using System.Collections.Generic;
using System.Collections.Immutable; using System.Collections.Immutable;
using System.Linq; using System.Linq;
using System.Text;
namespace LexicalAnalysis { namespace LexicalAnalysis {
/// <summary> /// <summary>
@ -166,16 +167,16 @@ namespace LexicalAnalysis {
); );
private State _state; private State _state;
private String _scanned; private StringBuilder _scanned;
public FSAOperator() { public FSAOperator() {
this._state = State.START; this._state = State.START;
this._scanned = ""; this._scanned = new StringBuilder();
} }
public override sealed void Reset() { public override sealed void Reset() {
this._state = State.START; this._state = State.START;
this._scanned = ""; this._scanned.Clear();
} }
public override sealed FSAStatus GetStatus() { public override sealed FSAStatus GetStatus() {
@ -192,11 +193,11 @@ namespace LexicalAnalysis {
} }
public override sealed Token RetrieveToken() { public override sealed Token RetrieveToken() {
return new TokenOperator(TokenOperator.Operators[this._scanned.Substring(0, this._scanned.Length - 1)]); return new TokenOperator(TokenOperator.Operators[this._scanned.ToString(0, this._scanned.Length - 1)]);
} }
public override sealed void ReadChar(Char ch) { public override sealed void ReadChar(Char ch) {
this._scanned = this._scanned + ch; this._scanned = this._scanned.Append(ch);
switch (this._state) { switch (this._state) {
case State.END: case State.END:
case State.ERROR: case State.ERROR:
@ -386,7 +387,7 @@ namespace LexicalAnalysis {
} }
public override sealed void ReadEOF() { public override sealed void ReadEOF() {
this._scanned = this._scanned + '0'; this._scanned = this._scanned.Append('0');
switch (this._state) { switch (this._state) {
case State.FINISH: case State.FINISH:
case State.SUB: case State.SUB:

View File

@ -36,21 +36,31 @@ namespace LexicalAnalysis {
private IEnumerable<Token> Lex() { private IEnumerable<Token> Lex() {
var tokens = new List<Token>(); var tokens = new List<Token>();
int line = 1, column = 1, lastColumn = column; int line = 1, column = 1, lastColumn = column;
char lastChr = '\0';
for (Int32 i = 0; i < this.Source.Length; ++i) { for (Int32 i = 0; i < this.Source.Length; ++i) {
if (i > 0 && this.Source[i - 1] == '\n') if (lastChr == '\n')
{ {
line++; line++;
lastColumn = 1; lastColumn = 1;
column = 1; column = 1;
} }
else column++; else column++;
this.FSAs.ForEach(fsa => fsa.ReadChar(this.Source[i])); bool isRunning = false;
int endIdx = -1;
var chr = Source[i];
for (int fsaIdx = 0; fsaIdx < FSAs.Count; fsaIdx++)
{
var fsa = FSAs[fsaIdx];
fsa.ReadChar(chr);
var status = fsa.GetStatus();
if (status == FSAStatus.RUNNING) isRunning = true;
else if (endIdx == -1 && status == FSAStatus.END) endIdx = fsaIdx;
}
// if no running // if no running
if (this.FSAs.FindIndex(fsa => fsa.GetStatus() == FSAStatus.RUNNING) == -1) { if (!isRunning) {
Int32 idx = this.FSAs.FindIndex(fsa => fsa.GetStatus() == FSAStatus.END); if (endIdx != -1) {
if (idx != -1) { Token token = this.FSAs[endIdx].RetrieveToken();
Token token = this.FSAs[idx].RetrieveToken();
if (token.Kind != TokenKind.NONE) { if (token.Kind != TokenKind.NONE) {
token.Line = line; token.Line = line;
token.Column = lastColumn; token.Column = lastColumn;
@ -58,19 +68,26 @@ namespace LexicalAnalysis {
tokens.Add(token); tokens.Add(token);
} }
i--; column--; i--; column--;
if (this.Source[i] == '\n') line--; if (lastChr == '\n') line--;
this.FSAs.ForEach(fsa => fsa.Reset()); foreach (var fsa in FSAs) fsa.Reset();
} else { } else {
Console.WriteLine("error"); Console.WriteLine("error");
} }
} }
if (!isRunning || endIdx == -1) lastChr = chr;
} }
this.FSAs.ForEach(fsa => fsa.ReadEOF()); var endIdx2 = -1;
for (int fsaIdx = 0; fsaIdx < FSAs.Count; fsaIdx++)
{
var fsa = FSAs[fsaIdx];
fsa.ReadEOF();
if (endIdx2 != -1) continue;
if (fsa.GetStatus() == FSAStatus.END) endIdx2 = fsaIdx;
}
// find END // find END
Int32 idx2 = this.FSAs.FindIndex(fsa => fsa.GetStatus() == FSAStatus.END); if (endIdx2 != -1) {
if (idx2 != -1) { Token token = this.FSAs[endIdx2].RetrieveToken();
Token token = this.FSAs[idx2].RetrieveToken();
if (token.Kind != TokenKind.NONE) { if (token.Kind != TokenKind.NONE) {
token.Line = line; token.Line = line;
token.Column = column + 1; token.Column = column + 1;

View File

@ -1,4 +1,5 @@
using System; using System;
using System.Text;
namespace LexicalAnalysis { namespace LexicalAnalysis {
/// <summary> /// <summary>
@ -54,22 +55,22 @@ namespace LexicalAnalysis {
private State _state; private State _state;
private readonly FSAChar _fsachar; private readonly FSAChar _fsachar;
private String _val; private StringBuilder _val;
private String _raw; private StringBuilder _raw;
private bool unicode = false; private bool unicode = false;
public FSAString() { public FSAString() {
this._state = State.START; this._state = State.START;
this._fsachar = new FSAChar('\"'); this._fsachar = new FSAChar('\"');
this._raw = ""; this._raw = new StringBuilder();
this._val = ""; this._val = new StringBuilder();
} }
public override void Reset() { public override void Reset() {
this._state = State.START; this._state = State.START;
this._fsachar.Reset(); this._fsachar.Reset();
this._raw = ""; this._raw.Clear();
this._val = ""; this._val.Clear();
unicode = false; unicode = false;
} }
@ -87,8 +88,8 @@ namespace LexicalAnalysis {
} }
public override Token RetrieveToken() { public override Token RetrieveToken() {
if (unicode) return new TokenUnicodeString(this._val, this._raw); if (unicode) return new TokenUnicodeString(this._val.ToString(), this._raw.ToString());
return new TokenString(this._val, this._raw); return new TokenString(this._val.ToString(), this._raw.ToString());
} }
public override void ReadChar(Char ch) { public override void ReadChar(Char ch) {
@ -129,8 +130,8 @@ namespace LexicalAnalysis {
switch (this._fsachar.GetStatus()) { switch (this._fsachar.GetStatus()) {
case FSAStatus.END: case FSAStatus.END:
this._state = State.Q; this._state = State.Q;
this._val = this._val + this._fsachar.RetrieveChar(); this._val.Append(this._fsachar.RetrieveChar());
this._raw = this._raw + this._fsachar.RetrieveRaw(); this._raw.Append(this._fsachar.RetrieveRaw());
this._fsachar.Reset(); this._fsachar.Reset();
ReadChar(ch); ReadChar(ch);
break; break;