Mirror of https://github.com/Kugelschieber/asl.git

Fixed tokenizer problem.

--- a/ToDo.md
+++ b/ToDo.md
@@ -2,7 +2,7 @@
 * ~~assign to returned values~~
 * special cases (like if ... exitWith, waitUntil {...})
-* sqf: ... sqf whitespace
+* ~~sqf: ... sqf whitespace~~
 * ~~solution for build in commands which do not require left values~~
 * ~~pretty/minified printing~~
 * ~~usage~~
@@ -10,4 +10,4 @@
 * concurrent compiling
 * ~~inline buildin function call -> foo(a)(bar(x)(y));~~
 * ~~negative values e.g. -1, operator !~~
-* tokenizer splits commands like "format" -> for, mat
+* ~~tokenizer splits commands like "format" -> for, mat~~
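
The last item above is the bug this commit fixes: the keyword scan fired inside longer identifiers, so a word like format was split at the embedded keyword. The streams below illustrate the behavior on the new identifier fixture; the "before" stream is inferred from the bug description, not taken from the repository:

input:            var format = "should not be for mat!";
before (broken):  var, for, mat, ...
after (fixed):    var, format, =, "should not be for mat!", ;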

@@ -1,3 +1,2 @@
 //diag_log format ["easyHC: found headless client with ID %1.", easyHCpresent];
-//diag_log () (format(xy)("asdf", "hjkl"));
-var floating = 1.23;
+diag_log () (format(xy)("asdf", "hjkl"));

@@ -29,7 +29,7 @@ func parseBlock() {
 		parseSwitch()
 	} else if accept("for") {
 		parseFor()
-	} else if accept("each") {
+	} else if accept("foreach") {
 		parseForeach()
 	} else if accept("func") {
 		parseFunction()
@@ -182,7 +182,7 @@ func parseFor() {
 }
 
 func parseForeach() {
-	expect("each")
+	expect("foreach")
 	expr := parseExpression(false)
 	expect("{")
 	appendOut("{", true)

@@ -41,8 +41,8 @@ func TestParserFor(t *testing.T) {
 	equal(t, got, want)
 }
 
-func TestParserEach(t *testing.T) {
-	got := getCompiled(t, "test/tokenizer_each.asl")
+func TestParserForeach(t *testing.T) {
+	got := getCompiled(t, "test/tokenizer_foreach.asl")
 	want := "{\n} forEach (allUnits);\n"
 
 	equal(t, got, want)

@@ -35,7 +35,7 @@ var keywords = []string{
 	"while",
 	"switch",
 	"for",
-	"each",
+	"foreach",
 	"func",
 	"true",
 	"false",
@@ -44,6 +44,7 @@ var keywords = []string{
 	"return"}
 
 var whitespace = []byte{' ', '\n', '\t'}
+var identifier = "abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ0123456789_"
 
 // Tokenizes the given byte array into syntax tokens,
 // which can be parsed later.
@@ -51,7 +52,7 @@ func Tokenize(code []byte) []Token {
 	code = removeComments(code)
 	tokens := make([]Token, 0)
 	token, mask, isstring := "", false, false
 
 	for i := range code {
 		c := code[i]
 
@@ -80,7 +81,7 @@ func Tokenize(code []byte) []Token {
 
 			tokens = append(tokens, Token{string(c)})
 			token = ""
-		} else if stringArrayContains(keywords, strings.ToLower(token)) {
+		} else if stringArrayContains(keywords, strings.ToLower(token)) && !isIdentifierCharacter(c) {
 			tokens = append(tokens, Token{token})
 			token = ""
 		} else if !byteArrayContains(whitespace, c) {
@@ -183,3 +184,14 @@ func stringArrayContains(haystack []string, needle string) bool {
 
 	return false
 }
+
+// Checks if a character is allowed for identifiers.
+func isIdentifierCharacter(c byte) bool {
+	for i := range identifier {
+		if identifier[i] == c {
+			return true
+		}
+	}
+
+	return false
+}
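
The guard added in the Tokenize hunk above, together with this helper, implements a keyword-boundary rule: a buffered token that matches a keyword is only emitted as one when the next byte cannot extend it into a longer identifier, so "for" inside "format" keeps accumulating instead of splitting off. A minimal, self-contained sketch of that rule (names such as alphabet and emitAsKeyword are illustrative, not this repository's API):

package main

import (
	"fmt"
	"strings"
)

// Same identifier alphabet the tokenizer uses.
const alphabet = "abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ0123456789_"

// A few stand-in keywords for the demo.
var keywords = []string{"for", "foreach", "func", "while"}

// isIdentChar reports whether c can extend an identifier.
func isIdentChar(c byte) bool {
	return strings.IndexByte(alphabet, c) >= 0
}

// emitAsKeyword mirrors the fixed condition: the buffered token must match a
// keyword AND the next byte must not extend it into a longer identifier.
func emitAsKeyword(token string, next byte) bool {
	for _, kw := range keywords {
		if strings.ToLower(token) == kw {
			return !isIdentChar(next)
		}
	}
	return false
}

func main() {
	fmt.Println(emitAsKeyword("for", 'm')) // false: "for" is just a prefix of "format"
	fmt.Println(emitAsKeyword("for", ' ')) // true: keyword boundary reached
}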

@@ -37,9 +37,9 @@ func TestTokenizerFor(t *testing.T) {
 	compareTokens(t, &got, &want)
 }
 
-func TestTokenizerEach(t *testing.T) {
-	got := getTokens(t, "test/tokenizer_each.asl")
-	want := []string{"each", "allUnits", "{", "}"}
+func TestTokenizerForeach(t *testing.T) {
+	got := getTokens(t, "test/tokenizer_foreach.asl")
+	want := []string{"foreach", "allUnits", "{", "}"}
 
 	compareLength(t, &got, &want)
 	compareTokens(t, &got, &want)
@@ -69,6 +69,14 @@ func TestTokenizerExpression(t *testing.T) {
 	compareTokens(t, &got, &want)
 }
 
+func TestTokenizerIdentifier(t *testing.T) {
+	got := getTokens(t, "test/tokenizer_identifier.asl")
+	want := []string{"var", "format", "=", "\"should not be for mat!\"", ";"}
+
+	compareLength(t, &got, &want)
+	compareTokens(t, &got, &want)
+}
+
 func compareLength(t *testing.T, got *[]Token, want *[]string) {
 	if len(*got) != len(*want) {
 		t.Error("Length of tokens got and expected tokens not equal, was:")

--- a/test/tokenizer_each.asl
+++ /dev/null
@@ -1,3 +0,0 @@
-each allUnits {
-	// ...
-}

--- /dev/null
+++ b/test/tokenizer_foreach.asl
@@ -0,0 +1,3 @@
+foreach allUnits {
+	// ...
+}

--- /dev/null
+++ b/test/tokenizer_identifier.asl
@@ -0,0 +1 @@
+var format = "should not be for mat!";
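
Assuming a standard Go layout for this repository, the fixtures above are exercised by TestTokenizerForeach, TestParserForeach, and TestTokenizerIdentifier; a plain test run over the package should cover the regression:

go test ./...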