mirror of
https://github.com/Kugelschieber/asl.git
synced 2026-01-18 03:50:25 +00:00
Fixed tokenizer problem.
This commit is contained in:
4
ToDo.md
4
ToDo.md
@@ -2,7 +2,7 @@
|
||||
|
||||
* ~~assign to returned values~~
|
||||
* special cases (like if ... exitWith, waitUntil {...})
|
||||
* sqf: ... sqf whitespace
|
||||
* ~~sqf: ... sqf whitespace~~
|
||||
* ~~solution for built-in commands which do not require left values~~
|
||||
* ~~pretty/minified printing~~
|
||||
* ~~usage~~
|
||||
@@ -10,4 +10,4 @@
|
||||
* concurrent compiling
|
||||
* ~~inline built-in function call -> foo(a)(bar(x)(y));~~
|
||||
* ~~negative values e.g. -1, operator !~~
|
||||
* tokenizer splits commands like "format" -> for, mat
|
||||
* ~~tokenizer splits commands like "format" -> for, mat~~
|
||||
|
||||
@@ -1,3 +1,2 @@
|
||||
//diag_log format ["easyHC: found headless client with ID %1.", easyHCpresent];
|
||||
//diag_log () (format(xy)("asdf", "hjkl"));
|
||||
var floating = 1.23;
|
||||
diag_log () (format(xy)("asdf", "hjkl"));
|
||||
|
||||
@@ -29,7 +29,7 @@ func parseBlock() {
|
||||
parseSwitch()
|
||||
} else if accept("for") {
|
||||
parseFor()
|
||||
} else if accept("each") {
|
||||
} else if accept("foreach") {
|
||||
parseForeach()
|
||||
} else if accept("func") {
|
||||
parseFunction()
|
||||
@@ -182,7 +182,7 @@ func parseFor() {
|
||||
}
|
||||
|
||||
func parseForeach() {
|
||||
expect("each")
|
||||
expect("foreach")
|
||||
expr := parseExpression(false)
|
||||
expect("{")
|
||||
appendOut("{", true)
|
||||
|
||||
@@ -41,8 +41,8 @@ func TestParserFor(t *testing.T) {
|
||||
equal(t, got, want)
|
||||
}
|
||||
|
||||
func TestParserEach(t *testing.T) {
|
||||
got := getCompiled(t, "test/tokenizer_each.asl")
|
||||
func TestParserForeach(t *testing.T) {
|
||||
got := getCompiled(t, "test/tokenizer_foreach.asl")
|
||||
want := "{\n} forEach (allUnits);\n"
|
||||
|
||||
equal(t, got, want)
|
||||
|
||||
@@ -35,7 +35,7 @@ var keywords = []string{
|
||||
"while",
|
||||
"switch",
|
||||
"for",
|
||||
"each",
|
||||
"foreach",
|
||||
"func",
|
||||
"true",
|
||||
"false",
|
||||
@@ -44,6 +44,7 @@ var keywords = []string{
|
||||
"return"}
|
||||
|
||||
var whitespace = []byte{' ', '\n', '\t'}
|
||||
var identifier = "abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ0123456789_"
|
||||
|
||||
// Tokenizes the given byte array into syntax tokens,
|
||||
// which can be parsed later.
|
||||
@@ -51,7 +52,7 @@ func Tokenize(code []byte) []Token {
|
||||
code = removeComments(code)
|
||||
tokens := make([]Token, 0)
|
||||
token, mask, isstring := "", false, false
|
||||
|
||||
|
||||
for i := range code {
|
||||
c := code[i]
|
||||
|
||||
@@ -80,7 +81,7 @@ func Tokenize(code []byte) []Token {
|
||||
|
||||
tokens = append(tokens, Token{string(c)})
|
||||
token = ""
|
||||
} else if stringArrayContains(keywords, strings.ToLower(token)) {
|
||||
} else if stringArrayContains(keywords, strings.ToLower(token)) && !isIdentifierCharacter(c) {
|
||||
tokens = append(tokens, Token{token})
|
||||
token = ""
|
||||
} else if !byteArrayContains(whitespace, c) {
|
||||
@@ -183,3 +184,14 @@ func stringArrayContains(haystack []string, needle string) bool {
|
||||
|
||||
return false
|
||||
}
|
||||
|
||||
// Checks if a character is allowed for identifiers.
|
||||
func isIdentifierCharacter(c byte) bool {
|
||||
for i := range identifier {
|
||||
if identifier[i] == c {
|
||||
return true
|
||||
}
|
||||
}
|
||||
|
||||
return false
|
||||
}
|
||||
|
||||
@@ -37,9 +37,9 @@ func TestTokenizerFor(t *testing.T) {
|
||||
compareTokens(t, &got, &want)
|
||||
}
|
||||
|
||||
func TestTokenizerEach(t *testing.T) {
|
||||
got := getTokens(t, "test/tokenizer_each.asl")
|
||||
want := []string{"each", "allUnits", "{", "}"}
|
||||
func TestTokenizerForach(t *testing.T) {
|
||||
got := getTokens(t, "test/tokenizer_foreach.asl")
|
||||
want := []string{"foreach", "allUnits", "{", "}"}
|
||||
|
||||
compareLength(t, &got, &want)
|
||||
compareTokens(t, &got, &want)
|
||||
@@ -69,6 +69,14 @@ func TestTokenizerExpression(t *testing.T) {
|
||||
compareTokens(t, &got, &want)
|
||||
}
|
||||
|
||||
func TestTokenizerIdentifier(t *testing.T) {
|
||||
got := getTokens(t, "test/tokenizer_identifier.asl")
|
||||
want := []string{"var", "format", "=", "\"should not be for mat!\"", ";"}
|
||||
|
||||
compareLength(t, &got, &want)
|
||||
compareTokens(t, &got, &want)
|
||||
}
|
||||
|
||||
func compareLength(t *testing.T, got *[]Token, want *[]string) {
|
||||
if len(*got) != len(*want) {
|
||||
t.Error("Length of tokens got and expected tokens not equal, was:")
|
||||
|
||||
@@ -1,3 +0,0 @@
|
||||
each allUnits {
|
||||
// ...
|
||||
}
|
||||
3
test/tokenizer_foreach.asl
Normal file
3
test/tokenizer_foreach.asl
Normal file
@@ -0,0 +1,3 @@
|
||||
foreach allUnits {
|
||||
// ...
|
||||
}
|
||||
1
test/tokenizer_identifier.asl
Normal file
1
test/tokenizer_identifier.asl
Normal file
@@ -0,0 +1 @@
|
||||
var format = "should not be for mat!";
|
||||
Reference in New Issue
Block a user