4 Commits
1.1.0 ... 1.1.1

Author SHA1 Message Date
Marvin Blum
f1f564d89b Updated version. 2015-11-14 16:48:24 +01:00
Marvin Blum
fa766cc78c Updated README. 2015-11-14 16:45:07 +01:00
Marvin Blum
59f02f7782 Issue #23. 2015-11-14 16:43:36 +01:00
Marvin Blum
1208bda68c Issue #21. 2015-11-14 16:22:39 +01:00
9 changed files with 78 additions and 25 deletions

View File

@@ -1,5 +1,10 @@
# Changelog # Changelog
**1.1.1**
* arrays can now be declared within expressions
* code keyword bug fix
**1.1.0** **1.1.0**
* changed syntax of foreach * changed syntax of foreach

View File

@@ -68,6 +68,9 @@ var one = array[0];
// accessing using a statement: // accessing using a statement:
var zwo = array[33/3-2]; var zwo = array[33/3-2];
// it is possible to use arrays in expressions:
var emptyArray = one-[0];
``` ```
### Control structures ### Control structures
@@ -252,18 +255,14 @@ The following features are not implemented yet, but will be in 1.1.0 or a future
* scopes * scopes
* else if * else if
* arrays within expressions (like someArray-[someEntity]) * selector in expression
scopes won't be supported, since they are a stupid concept and can be replaced by functions. scopes won't be supported, since they are a stupid concept and can be replaced by functions.
There is a simple workaround for arrays within expressions:
Selectors in expressions do not work (yet):
``` ```
// want: ... forEach allCurators-[myCurator]; var x = ([1, 2, 3]-[1, 2])[0]; // should result in 3
var myCuratorArray = [myCurator]; // or a more complex expression within array
foreach allCurators-myCuratorArray {
// ...
}
``` ```
## Contribute ## Contribute

View File

@@ -11,7 +11,7 @@ import (
) )
const ( const (
version = "1.1.0" version = "1.1.1"
extension = ".asl" extension = ".asl"
sqfextension = ".sqf" sqfextension = ".sqf"
PathSeparator = string(os.PathSeparator) PathSeparator = string(os.PathSeparator)
@@ -101,7 +101,7 @@ func compile(path string) {
continue continue
} }
token := tokenizer.Tokenize(code) token := tokenizer.Tokenize(code, false)
compiler := parser.Compiler{} compiler := parser.Compiler{}
sqf := compiler.Parse(token, pretty) sqf := compiler.Parse(token, pretty)

View File

@@ -70,34 +70,35 @@ func (c *Compiler) parseVar() {
if c.accept("=") { if c.accept("=") {
c.next() c.next()
c.appendOut(" = ", false) c.appendOut(" = ", false)
if c.accept("[") {
c.parseArray()
} else {
c.parseExpression(true) c.parseExpression(true)
} }
}
c.expect(";") c.expect(";")
c.appendOut(";", true) c.appendOut(";", true)
} }
func (c *Compiler) parseArray() { func (c *Compiler) parseArray(out bool) string {
output := ""
c.expect("[") c.expect("[")
c.appendOut("[", false) output += "["
if !c.accept("]") { if !c.accept("]") {
c.parseExpression(true) output += c.parseExpression(false)
for c.accept(",") { for c.accept(",") {
c.next() c.next()
c.appendOut(",", false) output += ","+c.parseExpression(false)
c.parseExpression(true)
} }
} }
c.expect("]") c.expect("]")
c.appendOut("]", false) output += "]"
if out {
c.appendOut(output, false)
}
return output
} }
func (c *Compiler) parseIf() { func (c *Compiler) parseIf() {
@@ -313,7 +314,7 @@ func (c *Compiler) parseInlineCode() string {
if len(code) > 2 { if len(code) > 2 {
compiler := Compiler{} compiler := Compiler{}
output = "{"+compiler.Parse(tokenizer.Tokenize([]byte(code[1:len(code)-1])), false)+"}" output = "{"+compiler.Parse(tokenizer.Tokenize([]byte(code[1:len(code)-1]), true), false)+"}"
} }
c.expect(")") c.expect(")")
@@ -463,6 +464,8 @@ func (c *Compiler) parseIdentifier() string {
name := c.get().Token name := c.get().Token
c.next() c.next()
output = "(" + c.parseFunctionCall(false, name) + ")" output = "(" + c.parseFunctionCall(false, name) + ")"
} else if c.accept("[") {
output += c.parseArray(false)
} else if c.seek("[") { } else if c.seek("[") {
output += "("+c.get().Token output += "("+c.get().Token
c.next() c.next()

View File

@@ -161,6 +161,13 @@ func TestParserPreprocessor(t *testing.T) {
equal(t, got, want) equal(t, got, want)
} }
func TestParserExpressionArray(t *testing.T) {
got := getCompiled(t, "test/parser_expression_array.asl")
want := "x = [1,2,3]-[2,3];\r\n"
equal(t, got, want)
}
func getCompiled(t *testing.T, file string) string { func getCompiled(t *testing.T, file string) string {
code, err := ioutil.ReadFile(file) code, err := ioutil.ReadFile(file)

View File

@@ -58,7 +58,11 @@ var new_line = []byte{'\r', '\n'}
// Tokenizes the given byte array into syntax tokens, // Tokenizes the given byte array into syntax tokens,
// which can be parsed later. // which can be parsed later.
func Tokenize(code []byte) []Token { func Tokenize(code []byte, doStripSlashes bool) []Token {
if doStripSlashes {
code = stripSlashes(code);
}
code = removeComments(code) code = removeComments(code)
tokens := make([]Token, 0) tokens := make([]Token, 0)
token, mask, isstring, line, column := "", false, false, 0, 0 token, mask, isstring, line, column := "", false, false, 0, 0
@@ -114,6 +118,28 @@ func Tokenize(code []byte) []Token {
return tokens return tokens
} }
// stripSlashes removes unescaped backslashes from the input code.
// A backslash escapes the byte that follows it (so `\\` yields `\` and
// `\"` yields `"`); this is used by the "code" keyword so that nested
// strings come out correctly in the generated output.
//
// Bug fix: the previous version allocated a full-length buffer and
// returned it whole, so every stripped backslash left a trailing zero
// byte in the result. We now return only the bytes actually written.
func stripSlashes(code []byte) []byte {
	newcode := make([]byte, 0, len(code))
	mask := false
	for _, c := range code {
		if c == '\\' && !mask {
			// Start an escape: drop the backslash, keep the next byte verbatim.
			mask = true
			continue
		}
		newcode = append(newcode, c)
		mask = false
	}
	return newcode
}
// Removes all comments from input byte array. // Removes all comments from input byte array.
// Comments are single line comments, starting with // (two slashes), // Comments are single line comments, starting with // (two slashes),
// multi line comments with /* ... */ (slash star, star slash). // multi line comments with /* ... */ (slash star, star slash).

View File

@@ -94,6 +94,16 @@ func TestTokenizerPreprocessor(t *testing.T) {
compareTokens(t, &got, &want) compareTokens(t, &got, &want)
} }
func TestTokenizerMask(t *testing.T) {
got := getTokens(t, "test/tokenizer_mask.asl")
//var y = code("var z = \"Hello \\"World\\"\";");
want := []string{"var", "x", "=", "\"Hello \\\"World\\\"\"", ";",
"var", "y", "=", "code", "(", "\"var z = \\\"Hello \\\\\"World\\\\\"\\\";\"", ")", ";"}
compareLength(t, &got, &want)
compareTokens(t, &got, &want)
}
func compareLength(t *testing.T, got *[]tokenizer.Token, want *[]string) { func compareLength(t *testing.T, got *[]tokenizer.Token, want *[]string) {
if len(*got) != len(*want) { if len(*got) != len(*want) {
t.Error("Length of tokens got and expected tokens not equal, was:") t.Error("Length of tokens got and expected tokens not equal, was:")
@@ -130,5 +140,5 @@ func getTokens(t *testing.T, file string) []tokenizer.Token {
t.FailNow() t.FailNow()
} }
return tokenizer.Tokenize(code) return tokenizer.Tokenize(code, false)
} }

View File

@@ -0,0 +1 @@
var x = [1, 2, 3]-[2, 3];

2
test/tokenizer_mask.asl Normal file
View File

@@ -0,0 +1,2 @@
var x = "Hello \"World\"";
var y = code("var z = \"Hello \\"World\\"\";");