@@ -4,13 +4,14 @@ import toml;
 public{
   import std.stdio : writeln;
   import std.file : read;
-  import std.array;
+  import std.array; // .array.idup + other
   import std.string;
   import std.uni : isWhite, isAlpha, isAlphaNum;
   import std.ascii : isDigit;
   import std.conv : to;
   import std.algorithm;
   import std.sumtype : SumType;
+  import std.range : repeat;
 }

 import dopp_lexer : tokenize, isKeyword;
@@ -28,8 +29,11 @@ export enum TokenType{
   String,
   Symbol,
   Whitespace, // maybe todo add equals and other (=, >=, <=, ==) - needs or not ?
+  Round_Bracket,
   Comment_Line,
   New_Line,
+  Indent_Incr,
+  Indent_Decr,
   Match_Any
 }

@@ -41,9 +45,14 @@ export struct Token{
   // for debug
   string toString(){
     import std.format : format;
+    import std.string : replace;
     //return "Token(type: %d, lexeme: %s)".format(type, lexeme);
     //return "Token = type: %d, lexeme: %s\n".format(type, lexeme);
-    return "Token = type: %d, lexeme: %s\n".format(type, ( (TokenType.New_Line == type) ? `\n` : lexeme) );
+    return "Token = type: %d, lexeme: %s\n".format(type,
+      ( (type == TokenType.New_Line) ?
+        `\n` :
+        ( (type == TokenType.Indent_Incr) || (type == TokenType.Indent_Decr) ) ?
+          lexeme.replace("\n", "\\n") : lexeme ) );
   }
 }

@@ -202,7 +211,7 @@ int main(string[] argv){
   // writeln("valid_argv = ", valid_argv); // todo add config key-value "no_warn" = false | true for silence

   if(!valid_argv){
-    return 0;
+    return 1;
   }


@@ -231,11 +240,13 @@ int main(string[] argv){

 string source = q"[
 auto y1 = "this is test";
-auto y2 = "this \"is\" test";
-auto y3 = `this "is" test`; // this is single line comment
+if(true)
+  auto y2 = "this \"is\" test";
+else
+  auto y3 = `this "is" test`; // this is single line comment
 auto y4 = `this is test`;
 auto y5 = `this `is` test`;
- ]"; // "
+]"; // "

 /+
 string source = q"[
@@ -261,9 +272,9 @@ void main(){
 ]";
 +/

-  auto tokens = tokenize(source, 0, 2); // indent_type = 0 = whitespace; indent_matter = 2 (whitespaces)
+  auto tokens = tokenize(source, 0, 2, 2); // indent_type = 0 = whitespace; indent_matter = 2 (whitespaces); indent_out = 2 (whitespaces in output-generated code)
   writeln(tokens);
-  auto result = parse(tokens); // indent_out = 2 (whitespaces in output-generated code)
+  auto result = parse(tokens);
   writeln(result);

 /+
@@ -282,7 +293,7 @@ void main(){
 +/


-  return 1;
+  return 0;
 }
