@@ -25,9 +25,9 @@ export Token[] tokenize(string source){
string lexeme = source[start .. i];
tokens ~= Token(lexeme.isKeyword ? TokenType.Keyword : TokenType.Identifier, lexeme);

- }else if(source[i].isDigit){
+ }else if(source[i].isDigit){ // start of a number literal
auto start = i;
- while(i < source.length && source[i].isDigit){
+ while(i < source.length && (source[i].isDigit || source[i] == '_')){ // allow underscore digit separators, e.g. 5_000
i++;
}
if(i < source.length && source[i] == '.'){ // include dot for float
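For reference, here is the new number scan in isolation. This is a minimal sketch under assumptions, not the patched code: the helper name scanNumber, the std.ascii import, and the unittest inputs are invented for illustration, and the fractional-part loop is a guess since the hunk above ends at the dot check; only the underscore and dot handling mirrors the diff.

    import std.ascii : isDigit;

    // Scan a number literal starting at index i, allowing underscore
    // digit separators such as 5_000 and an optional fractional part.
    size_t scanNumber(string source, size_t i)
    {
        while (i < source.length && (source[i].isDigit || source[i] == '_'))
            i++;                                  // integer part, underscores allowed
        if (i < source.length && source[i] == '.')
        {
            i++;                                  // consume the dot
            while (i < source.length && source[i].isDigit)
                i++;                              // fractional digits (assumed)
        }
        return i;                                 // first index after the literal
    }

    unittest
    {
        assert(scanNumber("5_000 ", 0) == 5);     // underscores stay inside the lexeme
        assert(scanNumber("3.14;", 0) == 4);      // dot pulls in the fractional part
    }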
@@ -41,9 +41,9 @@ export Token[] tokenize(string source){
tokens ~= Token(TokenType.Integer, source[start .. i]);
}

- }else if(source[i] == '"'){
- auto start = i++;
- while(i < source.length && source[i] != '"'){
+ }else if(source[i] == '"'){ // start of a string literal
+ auto start = i++; // remember the opening quote position, then step past it
+ while(i < source.length && source[i] != '"'){ // scan ahead to the closing quote
i++;
}
if(i < source.length){ // close quotes
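Likewise, a minimal standalone sketch of the string-literal scan, assuming a hypothetical helper scanString that returns the quoted slice; only the opening-quote and closing-quote bookkeeping follows the hunk above, the rest is invented for illustration.

    // Scan a double-quoted string literal starting at the opening quote.
    // Returns the lexeme including both quotes; i ends up just past it.
    string scanString(string source, ref size_t i)
    {
        auto start = i++;                         // remember the opening quote, step past it
        while (i < source.length && source[i] != '"')
            i++;                                  // advance to the closing quote
        if (i < source.length)
            i++;                                  // step past the closing quote
        return source[start .. i];                // quoted slice (assumed return shape)
    }

    unittest
    {
        size_t i = 0;
        assert(scanString(`"hi" rest`, i) == `"hi"`);
        assert(i == 4);
    }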