Fixed tokenizer problem.

This commit is contained in:
Marvin Blum
2015-10-25 13:03:51 +01:00
parent b8e38b2351
commit 276dd2c1e9
9 changed files with 37 additions and 17 deletions

View File

@@ -2,7 +2,7 @@
* ~~assign to returned values~~ * ~~assign to returned values~~
* special cases (like if ... exitWith, waitUntil {...}) * special cases (like if ... exitWith, waitUntil {...})
* sqf: ... sqf whitespace * ~~sqf: ... sqf whitespace~~
* ~~solution for built-in commands which do not require left values~~ * ~~solution for built-in commands which do not require left values~~
* ~~pretty/minified printing~~ * ~~pretty/minified printing~~
* ~~usage~~ * ~~usage~~
@@ -10,4 +10,4 @@
* concurrent compiling * concurrent compiling
* ~~inline buildin function call -> foo(a)(bar(x)(y));~~ * ~~inline buildin function call -> foo(a)(bar(x)(y));~~
* ~~negative values e.g. -1, operator !~~ * ~~negative values e.g. -1, operator !~~
* tokenizer splits commands like "format" -> for, mat * ~~tokenizer splits commands like "format" -> for, mat~~

View File

@@ -1,3 +1,2 @@
//diag_log format ["easyHC: found headless client with ID %1.", easyHCpresent]; //diag_log format ["easyHC: found headless client with ID %1.", easyHCpresent];
//diag_log () (format(xy)("asdf", "hjkl")); diag_log () (format(xy)("asdf", "hjkl"));
var floating = 1.23;

View File

@@ -29,7 +29,7 @@ func parseBlock() {
parseSwitch() parseSwitch()
} else if accept("for") { } else if accept("for") {
parseFor() parseFor()
} else if accept("each") { } else if accept("foreach") {
parseForeach() parseForeach()
} else if accept("func") { } else if accept("func") {
parseFunction() parseFunction()
@@ -182,7 +182,7 @@ func parseFor() {
} }
func parseForeach() { func parseForeach() {
expect("each") expect("foreach")
expr := parseExpression(false) expr := parseExpression(false)
expect("{") expect("{")
appendOut("{", true) appendOut("{", true)

View File

@@ -41,8 +41,8 @@ func TestParserFor(t *testing.T) {
equal(t, got, want) equal(t, got, want)
} }
func TestParserEach(t *testing.T) { func TestParserForeach(t *testing.T) {
got := getCompiled(t, "test/tokenizer_each.asl") got := getCompiled(t, "test/tokenizer_foreach.asl")
want := "{\n} forEach (allUnits);\n" want := "{\n} forEach (allUnits);\n"
equal(t, got, want) equal(t, got, want)

View File

@@ -35,7 +35,7 @@ var keywords = []string{
"while", "while",
"switch", "switch",
"for", "for",
"each", "foreach",
"func", "func",
"true", "true",
"false", "false",
@@ -44,6 +44,7 @@ var keywords = []string{
"return"} "return"}
var whitespace = []byte{' ', '\n', '\t'} var whitespace = []byte{' ', '\n', '\t'}
var identifier = "abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ0123456789_"
// Tokenizes the given byte array into syntax tokens, // Tokenizes the given byte array into syntax tokens,
// which can be parsed later. // which can be parsed later.
@@ -80,7 +81,7 @@ func Tokenize(code []byte) []Token {
tokens = append(tokens, Token{string(c)}) tokens = append(tokens, Token{string(c)})
token = "" token = ""
} else if stringArrayContains(keywords, strings.ToLower(token)) { } else if stringArrayContains(keywords, strings.ToLower(token)) && !isIdentifierCharacter(c) {
tokens = append(tokens, Token{token}) tokens = append(tokens, Token{token})
token = "" token = ""
} else if !byteArrayContains(whitespace, c) { } else if !byteArrayContains(whitespace, c) {
@@ -183,3 +184,14 @@ func stringArrayContains(haystack []string, needle string) bool {
return false return false
} }
// isIdentifierCharacter reports whether c may appear inside an identifier
// (ASCII letter, digit or underscore, per the package-level identifier set).
// The tokenizer uses it to avoid splitting keywords that are embedded in
// longer identifiers (e.g. "for" inside "format").
func isIdentifierCharacter(c byte) bool {
	// strings.IndexByte performs the same linear scan as a manual loop,
	// but is the idiomatic stdlib form (and is SIMD-accelerated).
	return strings.IndexByte(identifier, c) >= 0
}

View File

@@ -37,9 +37,9 @@ func TestTokenizerFor(t *testing.T) {
compareTokens(t, &got, &want) compareTokens(t, &got, &want)
} }
func TestTokenizerEach(t *testing.T) { func TestTokenizerForeach(t *testing.T) {
got := getTokens(t, "test/tokenizer_each.asl") got := getTokens(t, "test/tokenizer_foreach.asl")
want := []string{"each", "allUnits", "{", "}"} want := []string{"foreach", "allUnits", "{", "}"}
compareLength(t, &got, &want) compareLength(t, &got, &want)
compareTokens(t, &got, &want) compareTokens(t, &got, &want)
@@ -69,6 +69,14 @@ func TestTokenizerExpression(t *testing.T) {
compareTokens(t, &got, &want) compareTokens(t, &got, &want)
} }
// TestTokenizerIdentifier verifies that a keyword embedded in a longer
// identifier (here "for" inside "format") is kept as one token instead of
// being split apart by the tokenizer.
func TestTokenizerIdentifier(t *testing.T) {
	tokens := getTokens(t, "test/tokenizer_identifier.asl")
	expected := []string{"var", "format", "=", "\"should not be for mat!\"", ";"}
	compareLength(t, &tokens, &expected)
	compareTokens(t, &tokens, &expected)
}
func compareLength(t *testing.T, got *[]Token, want *[]string) { func compareLength(t *testing.T, got *[]Token, want *[]string) {
if len(*got) != len(*want) { if len(*got) != len(*want) {
t.Error("Length of tokens got and expected tokens not equal, was:") t.Error("Length of tokens got and expected tokens not equal, was:")

View File

@@ -1,3 +0,0 @@
each allUnits {
// ...
}

View File

@@ -0,0 +1,3 @@
foreach allUnits {
// ...
}

View File

@@ -0,0 +1 @@
var format = "should not be for mat!";