Mirror of https://github.com/Kugelschieber/asl.git (synced 2026-01-18 12:00:25 +00:00)

Issue #23.

@@ -101,7 +101,7 @@ func compile(path string) {
 			continue
 		}
 
-		token := tokenizer.Tokenize(code)
+		token := tokenizer.Tokenize(code, false)
 		compiler := parser.Compiler{}
 		sqf := compiler.Parse(token, pretty)
 
@@ -314,7 +314,7 @@ func (c *Compiler) parseInlineCode() string {
 
 	if len(code) > 2 {
 		compiler := Compiler{}
-		output = "{"+compiler.Parse(tokenizer.Tokenize([]byte(code[1:len(code)-1])), false)+"}"
+		output = "{"+compiler.Parse(tokenizer.Tokenize([]byte(code[1:len(code)-1]), true), false)+"}"
 	}
 
 	c.expect(")")
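
For context on why this call site now passes true: code here is presumably the string literal read for the code keyword, so code[1:len(code)-1] is that literal with its surrounding quotes trimmed but the inner backslash escapes still in place; the new flag tells the tokenizer to strip those escapes before the nested source is parsed. A minimal sketch of the slicing, using a hypothetical literal rather than anything from the repository:

package main

import "fmt"

func main() {
	// A hypothetical inline-code string literal, as the parser would see it.
	code := `"var z = \"Hello\";"`

	// Surrounding quotes trimmed; the inner escapes remain, which is what
	// the new doStripSlashes flag removes during tokenizing.
	body := code[1 : len(code)-1]
	fmt.Println(body) // var z = \"Hello\";
}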
@@ -58,7 +58,11 @@ var new_line = []byte{'\r', '\n'}
 
 // Tokenizes the given byte array into syntax tokens,
 // which can be parsed later.
-func Tokenize(code []byte) []Token {
+func Tokenize(code []byte, doStripSlashes bool) []Token {
+	if doStripSlashes {
+		code = stripSlashes(code);
+	}
+
 	code = removeComments(code)
 	tokens := make([]Token, 0)
 	token, mask, isstring, line, column := "", false, false, 0, 0
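
For callers the only visible change is the extra flag: regular source is tokenized as before, and only inline code bodies ask for escape stripping. A minimal usage sketch mirroring the call sites changed in this commit (the import path is an assumption, it is not shown in the diff):

package main

import (
	"fmt"

	"github.com/Kugelschieber/asl/tokenizer" // assumed import path
)

func main() {
	// Regular source files: no escape stripping, as in compile() and the tests.
	tokens := tokenizer.Tokenize([]byte(`var x = "Hello";`), false)

	// Inline code bodies (the "code" keyword): strip the escapes first,
	// as parseInlineCode() now does by passing true.
	inline := tokenizer.Tokenize([]byte(`var z = \"Hello\";`), true)

	fmt.Println(len(tokens), len(inline))
}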
@@ -114,6 +118,28 @@ func Tokenize(code []byte) []Token {
 	return tokens
 }
 
+// Removes slashes from input code.
+// This is used for the "code" keyword for correct strings in resulting code.
+func stripSlashes(code []byte) []byte {
+	newcode := make([]byte, len(code))
+	j, mask := 0, false
+
+	for i := 0; i < len(code); i++ {
+		c := code[i]
+
+		if c == '\\' && !mask {
+			mask = true
+			continue
+		}
+
+		newcode[j] = code[i]
+		mask = false
+		j++
+	}
+
+	return newcode
+}
+
 // Removes all comments from input byte array.
 // Comments are single line comments, starting with // (two slashes),
 // multi line comments with /* ... */ (slash star, star slash).
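
The helper's effect is easiest to see on the escaped string used by the new test below: every backslash that is not itself masked is dropped, and the character after it is kept verbatim. A standalone sketch of the same logic, reimplemented so it runs outside the package (the trim to the written length j is this sketch's own addition; the committed helper returns the full-length buffer):

package main

import "fmt"

// Reimplementation of the stripping logic above, for illustration only.
func stripSlashes(code []byte) []byte {
	newcode := make([]byte, len(code))
	j, mask := 0, false

	for i := 0; i < len(code); i++ {
		if code[i] == '\\' && !mask {
			mask = true // drop this backslash, keep whatever follows it
			continue
		}

		newcode[j] = code[i]
		mask = false
		j++
	}

	return newcode[:j] // trimming to j is this sketch's choice
}

func main() {
	// The body of the code("...") literal from the new tokenizer test.
	in := `var z = \"Hello \\"World\\"\";`
	fmt.Println(string(stripSlashes([]byte(in))))
	// Prints: var z = "Hello \"World\"";
}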
@@ -94,6 +94,16 @@ func TestTokenizerPreprocessor(t *testing.T) {
 	compareTokens(t, &got, &want)
 }
 
+func TestTokenizerMask(t *testing.T) {
+	got := getTokens(t, "test/tokenizer_mask.asl")
+	//var y = code("var z = \"Hello \\"World\\"\";");
+	want := []string{"var", "x", "=", "\"Hello \\\"World\\\"\"", ";",
+		"var", "y", "=", "code", "(", "\"var z = \\\"Hello \\\\\"World\\\\\"\\\";\"", ")", ";"}
+
+	compareLength(t, &got, &want)
+	compareTokens(t, &got, &want)
+}
+
 func compareLength(t *testing.T, got *[]tokenizer.Token, want *[]string) {
 	if len(*got) != len(*want) {
 		t.Error("Length of tokens got and expected tokens not equal, was:")
@@ -130,5 +140,5 @@ func getTokens(t *testing.T, file string) []tokenizer.Token {
 		t.FailNow()
 	}
 
-	return tokenizer.Tokenize(code)
+	return tokenizer.Tokenize(code, false)
 }