@@ -14,6 +14,7 @@ export bool isKeyword(string lexeme){
 export Token[] tokenize(string source, ubyte indent_type, ubyte indent_matter){
     Token[] tokens;
     bool inside_string = false;
+    bool inside_comment = false;
     //ubyte inside_string_type = 0; // 0 = ".."; 1 = `..`; 2 = todo
     string str_helper = "";
     int i = 0;
@@ -54,6 +55,24 @@ export Token[] tokenize(string source, ubyte indent_type, ubyte indent_matter){
             }
+        }else if( (inside_string == false) && (inside_comment == false) && (source[i] == '/') && ( (i + 1) < source.length ) && (source[i + 1] == '/') ){ // single line comment "//" begins
+            auto start = i; // comment begin position
+            inside_comment = true;
+            i++;
+
+            while( (i < source.length) && inside_comment ){ // advance to the single line comment's end position
+                if(source[i] == '\n'){ // a newline ends the single line comment
+                    i++;
+                    inside_comment = false;
+
+                }else{ // the single line comment has not ended yet
+                    i++;
+                }
+            }
+            tokens ~= Token(TokenType.Comment_Line, source[start .. i]);
+            tokens ~= Token(TokenType.New_Line, "\n");
+
+
         }else if( (inside_string == false) && (source[i] == '"') ){ // Type string ".." begins
             auto start = i++; // string begin position
             inside_string = true;
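
Usage sketch for the new comment branch, written as a minimal hypothetical unittest: the module name `tokenizer`, the public `type` field on Token, and the `0` values passed for indent_type/indent_matter are assumptions not confirmed by this diff.

    unittest {
        import tokenizer : tokenize, Token, TokenType; // hypothetical module name
        import std.algorithm.searching : canFind;

        string src = "int x; // trailing note\nint y;";
        Token[] toks = tokenize(src, 0, 0); // indent_type/indent_matter values are placeholders

        // The new branch should emit one Comment_Line token covering "// trailing note"
        // (plus the consumed newline), followed by an explicit New_Line token.
        assert(toks.canFind!(t => t.type == TokenType.Comment_Line)); // field name `type` is assumed
    }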