Marvin Blum
2015-12-15 23:19:41 +01:00
parent f1f564d89b
commit 69e684a230
6 changed files with 227 additions and 226 deletions

View File

@@ -1,13 +1,13 @@
 package main
 
 import (
-    "parser"
-    "tokenizer"
     "fmt"
     "io/ioutil"
-    "path/filepath"
     "os"
+    "parser"
+    "path/filepath"
     "strings"
+    "tokenizer"
 )
 
 const (
@@ -46,7 +46,7 @@ func flags(flag string) bool {
     if flag[0] == '-' {
         if flag == "-v" {
-            fmt.Println("asl version "+version)
+            fmt.Println("asl version " + version)
             exit = true
         } else if flag == "-r" {
             recursive = true
@@ -75,13 +75,13 @@ func readAslFiles(path string) {
         name := dir[i].Name()
 
         if dir[i].IsDir() && recursive {
-            readAslFiles(filepath.FromSlash(path+PathSeparator+name))
+            readAslFiles(filepath.FromSlash(path + PathSeparator + name))
             continue
         }
 
         if !dir[i].IsDir() && strings.ToLower(filepath.Ext(name)) == extension {
-            in := filepath.FromSlash(path+PathSeparator+dir[i].Name())
-            out := filepath.FromSlash("./"+path[len(inDir):len(path)])
+            in := filepath.FromSlash(path + PathSeparator + dir[i].Name())
+            out := filepath.FromSlash("./" + path[len(inDir):len(path)])
             newname := name[:len(name)-len(filepath.Ext(name))]
             file := ASLFile{in, out, newname}
@@ -92,12 +92,12 @@ func readAslFiles(path string) {
 func compile(path string) {
     for i := 0; i < len(aslFiles); i++ {
-        out := filepath.FromSlash(path+PathSeparator+aslFiles[i].out+PathSeparator+aslFiles[i].newname+sqfextension)
-        fmt.Println(aslFiles[i].in+" -> "+out)
+        out := filepath.FromSlash(path + PathSeparator + aslFiles[i].out + PathSeparator + aslFiles[i].newname + sqfextension)
+        fmt.Println(aslFiles[i].in + " -> " + out)
 
         code, err := ioutil.ReadFile(aslFiles[i].in)
 
         if err != nil {
-            fmt.Println("Error reading file: "+aslFiles[i].in)
+            fmt.Println("Error reading file: " + aslFiles[i].in)
             continue
         }
@@ -109,7 +109,7 @@ func compile(path string) {
         err = ioutil.WriteFile(out, []byte(sqf), 0666)
 
         if err != nil {
-            fmt.Println("Error writing file: "+aslFiles[i].out)
+            fmt.Println("Error writing file: " + aslFiles[i].out)
             fmt.Println(err)
         }
     }
@@ -125,7 +125,8 @@ func main() {
     }
 
     var i int
-    for i = 1; i < len(args) && flags(args[i]); i++ {}
+    for i = 1; i < len(args) && flags(args[i]); i++ {
+    }
 
     if exit {
         return

View File

@@ -87,7 +87,7 @@ func (c *Compiler) parseArray(out bool) string {
for c.accept(",") {
c.next()
output += ","+c.parseExpression(false)
output += "," + c.parseExpression(false)
}
}
@@ -314,7 +314,7 @@ func (c *Compiler) parseInlineCode() string {
     if len(code) > 2 {
         compiler := Compiler{}
-        output = "{"+compiler.Parse(tokenizer.Tokenize([]byte(code[1:len(code)-1]), true), false)+"}"
+        output = "{" + compiler.Parse(tokenizer.Tokenize([]byte(code[1:len(code)-1]), true), false) + "}"
     }
 
     c.expect(")")
@@ -467,10 +467,10 @@ func (c *Compiler) parseIdentifier() string {
} else if c.accept("[") {
output += c.parseArray(false)
} else if c.seek("[") {
output += "("+c.get().Token
output += "(" + c.get().Token
c.next()
c.expect("[")
output += " select ("+c.parseExpression(false)+"))"
output += " select (" + c.parseExpression(false) + "))"
c.expect("]")
} else if c.accept("!") || c.accept("-") {
output = c.get().Token

View File

@@ -38,7 +38,7 @@ func (c *Compiler) accept(token string) bool {
 // Throws if current token does not match expected one.
 func (c *Compiler) expect(token string) {
     if !c.tokenEqual(token, c.get()) {
-        panic("Parse error, expected '" + token + "' but was '" + c.get().Token + "' in line "+strconv.Itoa(c.get().Line)+" at "+strconv.Itoa(c.get().Column))
+        panic("Parse error, expected '" + token + "' but was '" + c.get().Token + "' in line " + strconv.Itoa(c.get().Line) + " at " + strconv.Itoa(c.get().Column))
     }
 
     c.next()

View File

@@ -1,10 +1,10 @@
 package parser_test
 
 import (
-    "tokenizer"
-    "parser"
     "io/ioutil"
+    "parser"
     "testing"
+    "tokenizer"
 )
 
 func TestParserDeclaration(t *testing.T) {

View File

@@ -60,7 +60,7 @@ var new_line = []byte{'\r', '\n'}
 // which can be parsed later.
 func Tokenize(code []byte, doStripSlashes bool) []Token {
     if doStripSlashes {
-        code = stripSlashes(code);
+        code = stripSlashes(code)
     }
 
     code = removeComments(code)

View File

@@ -1,9 +1,9 @@
 package tokenizer_test
 
 import (
-    "tokenizer"
     "io/ioutil"
     "testing"
+    "tokenizer"
 )
 
 func TestTokenizerVar(t *testing.T) {