
fix single line comment lexer & refactor a bit

221V committed 3 weeks ago
commit 1b4106a177
1 changed file with 4 additions and 6 deletions:
    source/dopp_lexer.d

source/dopp_lexer.d  (+4 −6)

@@ -55,22 +55,20 @@ export Token[] tokenize(string source, ubyte indent_type, ubyte indent_matter){
      }


-    }else if( (inside_string == false) && (inside_comment == false) && (source[i] == '/') && ( (i + 1) < source.length ) && (source[i + 1] == '/') ){ // single line comment "//" begins
+    }else if( (source[i] == '/') && (inside_string == false) && (inside_comment == false) && ( (i + 1) < source.length ) && (source[i + 1] == '/') ){ // single line comment "//" begins
      auto start = i;
      inside_comment = true;
      i++;

-      while( (i < source.length) && inside_comment ){ // goto Type single line comment end position
+      while( inside_comment && (i < source.length) ){ // goto Type single line comment end position
        if(source[i] == '\n'){ // line end means single line comment ends
-          i++;
+          inside_comment = false;
-          inside_string = false;
-
        }else{ // single line comment not ends yet
          i++;
        }
      }
+      if(inside_comment){ inside_comment = false; }
      tokens ~= Token(TokenType.Comment_Line, source[start .. i]);
-      tokens ~= Token(TokenType.New_Line, "\n");


    }else if( (inside_string == false) && (source[i] == '"') ){ // Type string ".." begins
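
For context, a minimal standalone sketch of what the patched branch now does when it meets a "//" comment: stop at the newline by clearing inside_comment (instead of touching inside_string), clear the flag again if the input ends while still inside the comment, and no longer emit an extra New_Line token. The Token/TokenType definitions and the function name lexLineComments below are illustrative stand-ins, not the exact declarations from dopp_lexer.d.

import std.stdio;

enum TokenType { Comment_Line, Other }

struct Token{
  TokenType type;
  string text;
}

// Sketch of the single line comment handling after this commit.
Token[] lexLineComments(string source){
  Token[] tokens;
  bool inside_string = false;
  bool inside_comment = false;

  for(size_t i = 0; i < source.length; ){
    if( (source[i] == '/') && (inside_string == false) && (inside_comment == false) && ( (i + 1) < source.length ) && (source[i + 1] == '/') ){ // single line comment "//" begins
      auto start = i;
      inside_comment = true;
      i++;

      while( inside_comment && (i < source.length) ){ // go to the comment end position
        if(source[i] == '\n'){ // line end means the single line comment ends
          inside_comment = false;
        }else{
          i++;
        }
      }
      if(inside_comment){ inside_comment = false; } // comment ran to the end of the input
      tokens ~= Token(TokenType.Comment_Line, source[start .. i]); // the newline itself is not consumed here

    }else{
      i++; // this sketch skips everything that is not a "//" comment
    }
  }
  return tokens;
}

void main(){
  foreach(t; lexLineComments("x = 1 // first\ny = 2 // second")){
    writeln(t.type, ": ", t.text); // prints: Comment_Line: // first, then Comment_Line: // second
  }
}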