This commit is contained in:
Marvin Blum
2015-12-15 23:19:41 +01:00
parent f1f564d89b
commit 69e684a230
6 changed files with 227 additions and 226 deletions

View File

@@ -1,34 +1,34 @@
package main
import (
"parser"
"tokenizer"
"fmt"
"io/ioutil"
"path/filepath"
"os"
"parser"
"path/filepath"
"strings"
"tokenizer"
)
const (
version = "1.1.1"
extension = ".asl"
sqfextension = ".sqf"
PathSeparator = string(os.PathSeparator)
version = "1.1.1"
extension = ".asl"
sqfextension = ".sqf"
PathSeparator = string(os.PathSeparator)
)
type ASLFile struct {
in string
out string
newname string
in string
out string
newname string
}
var (
recursive bool = false
pretty bool = false
exit bool = false
aslFiles []ASLFile
inDir string
recursive bool = false
pretty bool = false
exit bool = false
aslFiles []ASLFile
inDir string
)
func usage() {
@@ -42,109 +42,110 @@ func usage() {
}
func flags(flag string) bool {
flag = strings.ToLower(flag)
if flag[0] == '-' {
if flag == "-v" {
fmt.Println("asl version "+version)
exit = true
} else if flag == "-r" {
recursive = true
} else if flag == "-pretty" {
pretty = true
} else if flag == "--help" {
usage()
exit = true
}
return true
}
return false
flag = strings.ToLower(flag)
if flag[0] == '-' {
if flag == "-v" {
fmt.Println("asl version " + version)
exit = true
} else if flag == "-r" {
recursive = true
} else if flag == "-pretty" {
pretty = true
} else if flag == "--help" {
usage()
exit = true
}
return true
}
return false
}
func readAslFiles(path string) {
dir, err := ioutil.ReadDir(path)
if err != nil {
fmt.Println("Error reading in directory!")
return
}
for i := 0; i < len(dir); i++ {
name := dir[i].Name()
if dir[i].IsDir() && recursive {
readAslFiles(filepath.FromSlash(path+PathSeparator+name))
continue
}
dir, err := ioutil.ReadDir(path)
if !dir[i].IsDir() && strings.ToLower(filepath.Ext(name)) == extension {
in := filepath.FromSlash(path+PathSeparator+dir[i].Name())
out := filepath.FromSlash("./"+path[len(inDir):len(path)])
newname := name[:len(name)-len(filepath.Ext(name))]
file := ASLFile{in, out, newname}
aslFiles = append(aslFiles, file)
}
}
if err != nil {
fmt.Println("Error reading in directory!")
return
}
for i := 0; i < len(dir); i++ {
name := dir[i].Name()
if dir[i].IsDir() && recursive {
readAslFiles(filepath.FromSlash(path + PathSeparator + name))
continue
}
if !dir[i].IsDir() && strings.ToLower(filepath.Ext(name)) == extension {
in := filepath.FromSlash(path + PathSeparator + dir[i].Name())
out := filepath.FromSlash("./" + path[len(inDir):len(path)])
newname := name[:len(name)-len(filepath.Ext(name))]
file := ASLFile{in, out, newname}
aslFiles = append(aslFiles, file)
}
}
}
func compile(path string) {
for i := 0; i < len(aslFiles); i++ {
out := filepath.FromSlash(path+PathSeparator+aslFiles[i].out+PathSeparator+aslFiles[i].newname+sqfextension)
fmt.Println(aslFiles[i].in+" -> "+out)
code, err := ioutil.ReadFile(aslFiles[i].in)
if err != nil {
fmt.Println("Error reading file: "+aslFiles[i].in)
continue
}
token := tokenizer.Tokenize(code, false)
compiler := parser.Compiler{}
sqf := compiler.Parse(token, pretty)
os.MkdirAll(filepath.FromSlash(path+PathSeparator+aslFiles[i].out), 0777)
err = ioutil.WriteFile(out, []byte(sqf), 0666)
if err != nil {
fmt.Println("Error writing file: "+aslFiles[i].out)
fmt.Println(err)
}
}
for i := 0; i < len(aslFiles); i++ {
out := filepath.FromSlash(path + PathSeparator + aslFiles[i].out + PathSeparator + aslFiles[i].newname + sqfextension)
fmt.Println(aslFiles[i].in + " -> " + out)
code, err := ioutil.ReadFile(aslFiles[i].in)
if err != nil {
fmt.Println("Error reading file: " + aslFiles[i].in)
continue
}
token := tokenizer.Tokenize(code, false)
compiler := parser.Compiler{}
sqf := compiler.Parse(token, pretty)
os.MkdirAll(filepath.FromSlash(path+PathSeparator+aslFiles[i].out), 0777)
err = ioutil.WriteFile(out, []byte(sqf), 0666)
if err != nil {
fmt.Println("Error writing file: " + aslFiles[i].out)
fmt.Println(err)
}
}
}
func main() {
args := os.Args
// flags
if len(args) < 2 {
usage()
return
usage()
return
}
var i int
for i = 1; i < len(args) && flags(args[i]); i++ {}
if exit {
return
for i = 1; i < len(args) && flags(args[i]); i++ {
}
if exit {
return
}
// in/out parameter
out := ""
if i < len(args) {
inDir = args[i]
i++
inDir = args[i]
i++
} else {
return
return
}
if i < len(args) {
out = args[i]
out = args[i]
}
readAslFiles(inDir)
compile(out)
}

View File

@@ -10,7 +10,7 @@ const new_line = "\r\n"
// and writes SQF code into desired location.
func (c *Compiler) Parse(token []tokenizer.Token, prettyPrinting bool) string {
if !c.initParser(token, prettyPrinting) {
return ""
return ""
}
for c.tokenIndex < len(token) {
@@ -21,9 +21,9 @@ func (c *Compiler) Parse(token []tokenizer.Token, prettyPrinting bool) string {
}
func (c *Compiler) parseBlock() {
if c.get().Preprocessor {
c.parsePreprocessor()
} else if c.accept("var") {
if c.get().Preprocessor {
c.parsePreprocessor()
} else if c.accept("var") {
c.parseVar()
} else if c.accept("if") {
c.parseIf()
@@ -42,9 +42,9 @@ func (c *Compiler) parseBlock() {
} else if c.accept("try") {
c.parseTryCatch()
} else if c.accept("exitwith") {
c.parseExitWith()
c.parseExitWith()
} else if c.accept("waituntil") {
c.parseWaitUntil()
c.parseWaitUntil()
} else if c.accept("case") || c.accept("default") {
return
} else {
@@ -57,9 +57,9 @@ func (c *Compiler) parseBlock() {
}
func (c *Compiler) parsePreprocessor() {
// we definitely want a new line before and after
c.appendOut(new_line+c.get().Token+new_line, false)
c.next()
// we definitely want a new line before and after
c.appendOut(new_line+c.get().Token+new_line, false)
c.next()
}
func (c *Compiler) parseVar() {
@@ -78,7 +78,7 @@ func (c *Compiler) parseVar() {
}
func (c *Compiler) parseArray(out bool) string {
output := ""
output := ""
c.expect("[")
output += "["
@@ -87,17 +87,17 @@ func (c *Compiler) parseArray(out bool) string {
for c.accept(",") {
c.next()
output += ","+c.parseExpression(false)
output += "," + c.parseExpression(false)
}
}
c.expect("]")
output += "]"
if out {
c.appendOut(output, false)
c.appendOut(output, false)
}
return output
}
@@ -231,20 +231,20 @@ func (c *Compiler) parseFunctionParameter() {
if c.accept("{") {
return
}
c.appendOut("params [", false)
for !c.accept(")") {
name := c.get().Token
c.next()
if c.accept("=") {
c.next()
value := c.get().Token
c.next()
c.appendOut("[\""+name+"\","+value+"]", false)
c.next()
value := c.get().Token
c.next()
c.appendOut("[\""+name+"\","+value+"]", false)
} else {
c.appendOut("\""+name+"\"", false)
c.appendOut("\""+name+"\"", false)
}
if !c.accept(")") {
@@ -252,7 +252,7 @@ func (c *Compiler) parseFunctionParameter() {
c.appendOut(",", false)
}
}
c.appendOut("];", true)
}
@@ -279,47 +279,47 @@ func (c *Compiler) parseTryCatch() {
}
func (c *Compiler) parseExitWith() {
c.expect("exitwith")
c.expect("{")
c.appendOut("if (true) exitWith {", true)
c.parseBlock()
c.expect("}")
c.appendOut("};", true)
c.expect("exitwith")
c.expect("{")
c.appendOut("if (true) exitWith {", true)
c.parseBlock()
c.expect("}")
c.appendOut("};", true)
}
func (c *Compiler) parseWaitUntil() {
c.expect("waituntil")
c.expect("(")
c.appendOut("waitUntil {", false)
c.parseExpression(true)
if c.accept(";") {
c.next()
c.appendOut(";", false)
c.parseExpression(true)
}
c.expect(")")
c.expect(";")
c.appendOut("};", true)
c.expect("waituntil")
c.expect("(")
c.appendOut("waitUntil {", false)
c.parseExpression(true)
if c.accept(";") {
c.next()
c.appendOut(";", false)
c.parseExpression(true)
}
c.expect(")")
c.expect(";")
c.appendOut("};", true)
}
func (c *Compiler) parseInlineCode() string {
c.expect("code")
c.expect("(")
code := c.get().Token
c.next()
output := "{}"
if len(code) > 2 {
compiler := Compiler{}
output = "{"+compiler.Parse(tokenizer.Tokenize([]byte(code[1:len(code)-1]), true), false)+"}"
}
c.expect(")")
return output
c.expect("code")
c.expect("(")
code := c.get().Token
c.next()
output := "{}"
if len(code) > 2 {
compiler := Compiler{}
output = "{" + compiler.Parse(tokenizer.Tokenize([]byte(code[1:len(code)-1]), true), false) + "}"
}
c.expect(")")
return output
}
// Everything that does not start with a keyword.
@@ -459,19 +459,19 @@ func (c *Compiler) parseIdentifier() string {
output := ""
if c.accept("code") {
output += c.parseInlineCode()
output += c.parseInlineCode()
} else if c.seek("(") && !c.accept("!") && !c.accept("-") {
name := c.get().Token
c.next()
output = "(" + c.parseFunctionCall(false, name) + ")"
} else if c.accept("[") {
output += c.parseArray(false)
output += c.parseArray(false)
} else if c.seek("[") {
output += "("+c.get().Token
c.next()
c.expect("[")
output += " select ("+c.parseExpression(false)+"))"
c.expect("]")
output += "(" + c.get().Token
c.next()
c.expect("[")
output += " select (" + c.parseExpression(false) + "))"
c.expect("]")
} else if c.accept("!") || c.accept("-") {
output = c.get().Token
c.next()

View File

@@ -1,16 +1,16 @@
package parser
import (
"strconv"
"tokenizer"
"strconv"
"tokenizer"
)
type Compiler struct {
tokens []tokenizer.Token
tokenIndex int
out string
offset int
pretty bool
tokens []tokenizer.Token
tokenIndex int
out string
offset int
pretty bool
}
// Initializes the parser.
@@ -24,7 +24,7 @@ func (c *Compiler) initParser(token []tokenizer.Token, prettyPrinting bool) bool
c.out = ""
c.offset = 0
c.pretty = prettyPrinting
return true
}
@@ -38,7 +38,7 @@ func (c *Compiler) accept(token string) bool {
// Throws if current token does not match expected one.
func (c *Compiler) expect(token string) {
if !c.tokenEqual(token, c.get()) {
panic("Parse error, expected '" + token + "' but was '" + c.get().Token + "' in line "+strconv.Itoa(c.get().Line)+" at "+strconv.Itoa(c.get().Column))
panic("Parse error, expected '" + token + "' but was '" + c.get().Token + "' in line " + strconv.Itoa(c.get().Line) + " at " + strconv.Itoa(c.get().Column))
}
c.next()

View File

@@ -1,10 +1,10 @@
package parser_test
import (
"tokenizer"
"parser"
"io/ioutil"
"parser"
"testing"
"tokenizer"
)
func TestParserDeclaration(t *testing.T) {
@@ -50,9 +50,9 @@ func TestParserForeach(t *testing.T) {
}
func TestParserSwitch(t *testing.T) {
got := getCompiled(t, "test/tokenizer_switch.asl")
got := getCompiled(t, "test/tokenizer_switch.asl")
want := "switch (x) do {\r\ncase 1:\r\n{\r\nx = 1;\r\n};\r\ncase 2:\r\n{\r\nx = 2;\r\n};\r\ndefault:\r\n{\r\nx = 3;\r\n};\r\n};\r\n"
equal(t, got, want)
}

View File

@@ -5,10 +5,10 @@ import (
)
type Token struct {
Token string
Token string
Preprocessor bool
Line int
Column int
Line int
Column int
}
var delimiter = []byte{
@@ -59,10 +59,10 @@ var new_line = []byte{'\r', '\n'}
// Tokenizes the given byte array into syntax tokens,
// which can be parsed later.
func Tokenize(code []byte, doStripSlashes bool) []Token {
if doStripSlashes {
code = stripSlashes(code);
}
if doStripSlashes {
code = stripSlashes(code)
}
code = removeComments(code)
tokens := make([]Token, 0)
token, mask, isstring, line, column := "", false, false, 0, 0
@@ -70,10 +70,10 @@ func Tokenize(code []byte, doStripSlashes bool) []Token {
for i := 0; i < len(code); i++ {
c := code[i]
column++
if byteArrayContains(new_line, c) {
line++
column = 0
line++
column = 0
}
// string masks (backslash)
@@ -95,8 +95,8 @@ func Tokenize(code []byte, doStripSlashes bool) []Token {
} else {
// preprocessor, delimiter, keyword or variable/expression
if c == preprocessor {
tokens = append(tokens, preprocessorLine(code, &i, line, column))
token = ""
tokens = append(tokens, preprocessorLine(code, &i, line, column))
token = ""
} else if byteArrayContains(delimiter, c) {
if token != "" {
tokens = append(tokens, Token{token, false, line, column})
@@ -121,23 +121,23 @@ func Tokenize(code []byte, doStripSlashes bool) []Token {
// Removes slashes from input code.
// This is used for the "code" keyword for correct strings in resulting code.
func stripSlashes(code []byte) []byte {
newcode := make([]byte, len(code))
j, mask := 0, false
for i := 0; i < len(code); i++ {
c := code[i]
if c == '\\' && !mask {
newcode := make([]byte, len(code))
j, mask := 0, false
for i := 0; i < len(code); i++ {
c := code[i]
if c == '\\' && !mask {
mask = true
continue
}
newcode[j] = code[i]
mask = false
j++
}
return newcode
newcode[j] = code[i]
mask = false
j++
}
return newcode
}
// Removes all comments from input byte array.
@@ -180,31 +180,31 @@ func removeComments(code []byte) []byte {
// Reads preprocessor command until end of line
func preprocessorLine(code []byte, i *int, lineNr, column int) Token {
c := byte('0')
var line string
for *i < len(code) {
c = code[*i]
if byteArrayContains(new_line, c) {
break
}
line += string(c)
(*i)++
}
// read all new line characters (\r and \n)
c = code[*i]
for byteArrayContains(new_line, c) {
(*i)++
c = code[*i]
}
(*i)-- // for will count up 1, so subtract it here
return Token{line, true, lineNr, column}
c := byte('0')
var line string
for *i < len(code) {
c = code[*i]
if byteArrayContains(new_line, c) {
break
}
line += string(c)
(*i)++
}
// read all new line characters (\r and \n)
c = code[*i]
for byteArrayContains(new_line, c) {
(*i)++
c = code[*i]
}
(*i)-- // for will count up 1, so subtract it here
return Token{line, true, lineNr, column}
}
// Returns the next character in code starting at i.

View File

@@ -1,9 +1,9 @@
package tokenizer_test
import (
"tokenizer"
"io/ioutil"
"testing"
"tokenizer"
)
func TestTokenizerVar(t *testing.T) {
@@ -96,9 +96,9 @@ func TestTokenizerPreprocessor(t *testing.T) {
func TestTokenizerMask(t *testing.T) {
got := getTokens(t, "test/tokenizer_mask.asl")
//var y = code("var z = \"Hello \\"World\\"\";");
//var y = code("var z = \"Hello \\"World\\"\";");
want := []string{"var", "x", "=", "\"Hello \\\"World\\\"\"", ";",
"var", "y", "=", "code", "(", "\"var z = \\\"Hello \\\\\"World\\\\\"\\\";\"", ")", ";"}
"var", "y", "=", "code", "(", "\"var z = \\\"Hello \\\\\"World\\\\\"\\\";\"", ")", ";"}
compareLength(t, &got, &want)
compareTokens(t, &got, &want)