diff --git a/CHANGELOG.md b/CHANGELOG.md
index 4a9a067..e201263 100644
--- a/CHANGELOG.md
+++ b/CHANGELOG.md
@@ -1,5 +1,10 @@
 # Changelog
 
+**1.1.1**
+
+* arrays can now be declared within expressions
+* code keyword bug fix
+
 **1.1.0**
 
 * changed syntax of foreach
diff --git a/src/main/asl.go b/src/main/asl.go
index 656a3c0..8523c84 100644
--- a/src/main/asl.go
+++ b/src/main/asl.go
@@ -101,7 +101,7 @@ func compile(path string) {
 			continue
 		}
 
-		token := tokenizer.Tokenize(code)
+		token := tokenizer.Tokenize(code, false)
 
 		compiler := parser.Compiler{}
 		sqf := compiler.Parse(token, pretty)
diff --git a/src/parser/parser.go b/src/parser/parser.go
index 52a85e8..2e9262d 100644
--- a/src/parser/parser.go
+++ b/src/parser/parser.go
@@ -314,7 +314,7 @@ func (c *Compiler) parseInlineCode() string {
 
 	if len(code) > 2 {
 		compiler := Compiler{}
-		output = "{"+compiler.Parse(tokenizer.Tokenize([]byte(code[1:len(code)-1])), false)+"}"
+		output = "{"+compiler.Parse(tokenizer.Tokenize([]byte(code[1:len(code)-1]), true), false)+"}"
 	}
 
 	c.expect(")")
diff --git a/src/tokenizer/tokenizer.go b/src/tokenizer/tokenizer.go
index eb11f00..cc0b557 100644
--- a/src/tokenizer/tokenizer.go
+++ b/src/tokenizer/tokenizer.go
@@ -58,7 +58,11 @@ var new_line = []byte{'\r', '\n'}
 
 // Tokenizes the given byte array into syntax tokens,
 // which can be parsed later.
-func Tokenize(code []byte) []Token {
+func Tokenize(code []byte, doStripSlashes bool) []Token {
+	if doStripSlashes {
+		code = stripSlashes(code)
+	}
+
 	code = removeComments(code)
 	tokens := make([]Token, 0)
 	token, mask, isstring, line, column := "", false, false, 0, 0
@@ -114,6 +118,29 @@
 	return tokens
 }
 
+// Removes slashes from input code.
+// This is used for the "code" keyword for correct strings in resulting code.
+func stripSlashes(code []byte) []byte {
+	newcode := make([]byte, len(code))
+	j, mask := 0, false
+
+	for i := 0; i < len(code); i++ {
+		c := code[i]
+
+		if c == '\\' && !mask {
+			mask = true
+			continue
+		}
+
+		newcode[j] = code[i]
+		mask = false
+		j++
+	}
+
+	// only the first j bytes were written; drop the zeroed tail
+	return newcode[:j]
+}
+
 // Removes all comments from input byte array.
 // Comments are single line comments, starting with // (two slashes),
 // multi line comments with /* ... */ (slash star, star slash).
diff --git a/src/tokenizer/tokenizer_test.go b/src/tokenizer/tokenizer_test.go
index ea50718..b1d3d25 100644
--- a/src/tokenizer/tokenizer_test.go
+++ b/src/tokenizer/tokenizer_test.go
@@ -94,6 +94,16 @@ func TestTokenizerPreprocessor(t *testing.T) {
 	compareTokens(t, &got, &want)
 }
 
+func TestTokenizerMask(t *testing.T) {
+	got := getTokens(t, "test/tokenizer_mask.asl")
+	//var y = code("var z = \"Hello \\"World\\"\";");
+	want := []string{"var", "x", "=", "\"Hello \\\"World\\\"\"", ";",
+		"var", "y", "=", "code", "(", "\"var z = \\\"Hello \\\\\"World\\\\\"\\\";\"", ")", ";"}
+
+	compareLength(t, &got, &want)
+	compareTokens(t, &got, &want)
+}
+
 func compareLength(t *testing.T, got *[]tokenizer.Token, want *[]string) {
 	if len(*got) != len(*want) {
 		t.Error("Length of tokens got and expected tokens not equal, was:")
@@ -130,5 +140,5 @@ func getTokens(t *testing.T, file string) []tokenizer.Token {
 		t.FailNow()
 	}
 
-	return tokenizer.Tokenize(code)
+	return tokenizer.Tokenize(code, false)
 }
diff --git a/test/tokenizer_mask.asl b/test/tokenizer_mask.asl
new file mode 100644
index 0000000..c8fef9e
--- /dev/null
+++ b/test/tokenizer_mask.asl
@@ -0,0 +1,2 @@
+var x = "Hello \"World\"";
+var y = code("var z = \"Hello \\"World\\"\";");