Fixed tokenizer problem.

This commit is contained in:
Marvin Blum
2015-10-25 13:03:51 +01:00
parent b8e38b2351
commit 276dd2c1e9
9 changed files with 37 additions and 17 deletions

View File

@@ -2,7 +2,7 @@
* ~~assign to returned values~~
* special cases (like if ... exitWith, waitUntil {...})
* sqf: ... sqf whitespace
* ~~sqf: ... sqf whitespace~~
* ~~solution for build in commands which do not require left values~~
* ~~pretty/minified printing~~
* ~~usage~~
@@ -10,4 +10,4 @@
* concurrent compiling
* ~~inline buildin function call -> foo(a)(bar(x)(y));~~
* ~~negative values e.g. -1, operator !~~
* tokenizer splits commands like "format" -> for, mat
* ~~tokenizer splits commands like "format" -> for, mat~~

View File

@@ -1,3 +1,2 @@
//diag_log format ["easyHC: found headless client with ID %1.", easyHCpresent];
//diag_log () (format(xy)("asdf", "hjkl"));
var floating = 1.23;
diag_log () (format(xy)("asdf", "hjkl"));

View File

@@ -29,7 +29,7 @@ func parseBlock() {
parseSwitch()
} else if accept("for") {
parseFor()
} else if accept("each") {
} else if accept("foreach") {
parseForeach()
} else if accept("func") {
parseFunction()
@@ -182,7 +182,7 @@ func parseFor() {
}
func parseForeach() {
expect("each")
expect("foreach")
expr := parseExpression(false)
expect("{")
appendOut("{", true)

View File

@@ -41,8 +41,8 @@ func TestParserFor(t *testing.T) {
equal(t, got, want)
}
func TestParserEach(t *testing.T) {
got := getCompiled(t, "test/tokenizer_each.asl")
func TestParserForeach(t *testing.T) {
got := getCompiled(t, "test/tokenizer_foreach.asl")
want := "{\n} forEach (allUnits);\n"
equal(t, got, want)

View File

@@ -35,7 +35,7 @@ var keywords = []string{
"while",
"switch",
"for",
"each",
"foreach",
"func",
"true",
"false",
@@ -44,6 +44,7 @@ var keywords = []string{
"return"}
var whitespace = []byte{' ', '\n', '\t'}
var identifier = "abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ0123456789_"
// Tokenizes the given byte array into syntax tokens,
// which can be parsed later.
@@ -80,7 +81,7 @@ func Tokenize(code []byte) []Token {
tokens = append(tokens, Token{string(c)})
token = ""
} else if stringArrayContains(keywords, strings.ToLower(token)) {
} else if stringArrayContains(keywords, strings.ToLower(token)) && !isIdentifierCharacter(c) {
tokens = append(tokens, Token{token})
token = ""
} else if !byteArrayContains(whitespace, c) {
@@ -183,3 +184,14 @@ func stringArrayContains(haystack []string, needle string) bool {
return false
}
// isIdentifierCharacter reports whether c may appear in an identifier.
// It is used by the tokenizer to avoid splitting keywords out of longer
// identifiers (e.g. the "for" prefix of "format").
func isIdentifierCharacter(c byte) bool {
	// identifier is the package-level string of all legal identifier
	// bytes; a stdlib substring search replaces the hand-rolled scan.
	return strings.IndexByte(identifier, c) != -1
}

View File

@@ -37,9 +37,9 @@ func TestTokenizerFor(t *testing.T) {
compareTokens(t, &got, &want)
}
func TestTokenizerEach(t *testing.T) {
got := getTokens(t, "test/tokenizer_each.asl")
want := []string{"each", "allUnits", "{", "}"}
func TestTokenizerForeach(t *testing.T) {
got := getTokens(t, "test/tokenizer_foreach.asl")
want := []string{"foreach", "allUnits", "{", "}"}
compareLength(t, &got, &want)
compareTokens(t, &got, &want)
@@ -69,6 +69,14 @@ func TestTokenizerExpression(t *testing.T) {
compareTokens(t, &got, &want)
}
// TestTokenizerIdentifier ensures that a keyword prefix inside an
// identifier (the "for" in "format") is not split into its own token.
func TestTokenizerIdentifier(t *testing.T) {
	tokens := getTokens(t, "test/tokenizer_identifier.asl")
	expected := []string{"var", "format", "=", "\"should not be for mat!\"", ";"}
	compareLength(t, &tokens, &expected)
	compareTokens(t, &tokens, &expected)
}
func compareLength(t *testing.T, got *[]Token, want *[]string) {
if len(*got) != len(*want) {
t.Error("Length of tokens got and expected tokens not equal, was:")

View File

@@ -1,3 +0,0 @@
each allUnits {
// ...
}

View File

@@ -0,0 +1,3 @@
foreach allUnits {
// ...
}

View File

@@ -0,0 +1 @@
var format = "should not be for mat!";