revamped lexer code to be more flexible
parent ff05782056
commit e4ad03774e
1 changed file with 52 additions and 44 deletions
src/Lex.cs (96 changed lines)
@@ -2,51 +2,65 @@ using System.Text.RegularExpressions;
using Dungeoneer.Error;

namespace Dungeoneer.Lexing {
namespace Dungeoneer.Interpreter {

	public static class Lexer {

		public static readonly char[] Delimiters = { ' ', '+', '-', '*', '/', '^' };
		public static readonly char[] Operators = { '+', '-', '*', '/', '^' };

		public static TokenSet Tokenize(string text) {
			var output = new TokenSet();
			var parts = text.Trim().Split(" ", 0x11);
			foreach(string part in parts) {

				var diceMatch = DiceToken.Match(part);
				if(diceMatch.Success) {
					output.Add(new DiceToken(diceMatch));
					continue;
				}

				var numberMatch = NumberToken.Match(part);
				if(numberMatch.Success) {
					output.Add(new NumberToken(numberMatch));
					continue;
				}

				var dcMatch = DcToken.Match(part);
				if(dcMatch.Success) {
					output.Add(new DcToken(dcMatch));
					continue;
				}

				var operatorMatch = OperatorToken.Match(part);
				if(operatorMatch.Success) {
					output.Add(new OperatorToken(operatorMatch));
					continue;
				}

				var varMatch = VarToken.Match(part);
				if(varMatch.Success) {
					output.Add(new VarToken(varMatch));
					continue;
				}

				throw new UnmatchedTokenException(part);
			/* - iterate over input characters:
			 *   - create a buffer of text characters
			 *   - if a delimiter is found:
			 *     - consume the buffer into a token and add to output
			 *     - if the delimiter was an operator, add its token to output
			 * - when the string terminates, consume the buffer
			 */
			string buffer = "";
			foreach(char c in text) {
				if(Delimiters.Contains(c)) {
					if(buffer.Length != 0) {
						Console.WriteLine(buffer);
						var token = Match(buffer);
						output.Add(token);
					}

					if(Operators.Contains(c))
						output.Add(new OperatorToken(c));
					buffer = "";
				} else
					buffer += c;
			}
			if(buffer.Length != 0)
				output.Add(Match(buffer));

			return output;
		}

		public static Token Match(string text) {

			var diceMatch = DiceToken.Match(text);
			if(diceMatch.Success)
				return new DiceToken(diceMatch);

			var numberMatch = NumberToken.Match(text);
			if(numberMatch.Success)
				return new NumberToken(numberMatch);

			var dcMatch = DcToken.Match(text);
			if(dcMatch.Success)
				return new DcToken(dcMatch);

			var varMatch = VarToken.Match(text);
			if(varMatch.Success)
				return new VarToken(varMatch);

			throw new UnmatchedTokenException(text);
		}

	}

	public class TokenSet : List<Token> {
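For context, a minimal usage sketch of the revamped lexer (not part of the commit). It assumes the Lexer and token classes behave exactly as shown in the hunk above; the namespace import and the demo wrapper are placeholders, and whether "1d20", "5", or "2d6" actually match DiceToken and NumberToken depends on regex patterns defined elsewhere in Lex.cs.

using System;
using Dungeoneer.Lexing; // placeholder: use whichever namespace this commit keeps

public static class LexerDemo {
	public static void Main() {
		// The old Tokenize split on spaces, so "1d20+5" had to be written "1d20 + 5".
		// The new buffer/delimiter loop consumes characters directly, so both forms work.
		TokenSet tokens = Lexer.Tokenize("1d20+5");
		Console.WriteLine(tokens); // TokenSet.ToString() joins the tokens with spaces

		// Single lexemes can also be classified through the new static helper;
		// per the diff, unrecognized input throws UnmatchedTokenException.
		Token single = Lexer.Match("2d6");
		Console.WriteLine(single);
	}
}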
@@ -57,7 +71,7 @@ namespace Dungeoneer.Lexing {
			var output = "";
			foreach(Token token in this)
				output += $"{token} ";
			return output;
			return output.Trim();
		}

	}
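The hunk above is a one-line fix: TokenSet.ToString() still concatenates each token followed by a space, but now trims the trailing space before returning. A tiny sketch of the observable difference, assuming the char-based OperatorToken constructor shown in the next hunk:

using System;
using Dungeoneer.Lexing; // placeholder namespace, as above

public static class TokenSetDemo {
	public static void Main() {
		// TokenSet derives from List<Token>, so a collection initializer works.
		var set = new TokenSet { new OperatorToken('+'), new OperatorToken('-') };
		Console.WriteLine($"[{set}]");
		// the old `return output;` would yield "[+ - ]" (trailing space);
		// the new `return output.Trim();` yields "[+ -]"
	}
}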
@@ -79,17 +93,11 @@ namespace Dungeoneer.Lexing {
	}

	public class OperatorToken : Token {
		public string dummy;
		public char Value;

		internal static readonly Regex Pattern = new Regex(@"([\+\-\*\/])");
		public OperatorToken(char op) { Value = op; }

		public OperatorToken(Match match) {
			dummy = match.Groups[1].Value;
		}

		public override string ToString() { return dummy; }

		public static Match Match(string text) { return Pattern.Match(text); }
		public override string ToString() { return Value.ToString(); }

	}
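The OperatorToken rework stores the operator character itself instead of a regex-captured string, which is what lets the new character loop in Tokenize construct operator tokens directly rather than re-matching them. A short sketch of old versus new construction, assuming the class ends up with only the members shown in this hunk:

using System;
using System.Text.RegularExpressions;
using Dungeoneer.Lexing; // placeholder namespace, as above

public static class OperatorTokenDemo {
	public static void Main() {
		// New path (the char constructor required by the revamped Tokenize loop):
		var plus = new OperatorToken('+');
		Console.WriteLine(plus); // "+", via ToString() => Value.ToString()

		// Old path (apparently dropped by this commit): a regex Match fed the constructor.
		// var m = new Regex(@"([\+\-\*\/])").Match("+");
		// var tok = new OperatorToken(m);
	}
}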