Fix issue #17 and compile ASL files concurrently.

This commit is contained in:
Marvin Blum
2015-12-15 23:53:47 +01:00
parent 29804f0d20
commit 4dfa2ee3ef
2 changed files with 53 additions and 21 deletions

View File

@@ -41,6 +41,7 @@ func usage() {
fmt.Println("<output directory> output directory, directory structure will be created corresponding to input directory") fmt.Println("<output directory> output directory, directory structure will be created corresponding to input directory")
} }
// Parses compiler flags.
func flags(flag string) bool { func flags(flag string) bool {
flag = strings.ToLower(flag) flag = strings.ToLower(flag)
@@ -63,6 +64,7 @@ func flags(flag string) bool {
return false return false
} }
// Creates a list of all ASL files to compile.
func readAslFiles(path string) { func readAslFiles(path string) {
dir, err := ioutil.ReadDir(path) dir, err := ioutil.ReadDir(path)
@@ -90,28 +92,57 @@ func readAslFiles(path string) {
} }
} }
// recoverCompileError is the deferred panic handler for a single compile
// goroutine. It reports any compile error raised via panic and, in every
// case, signals the waiter channel so the coordinating goroutine can make
// progress even when a file fails.
func recoverCompileError(file string, waiter chan bool) {
	cause := recover()
	if cause != nil {
		fmt.Println("Compile error in file "+file+":", cause)
	}
	// Signal completion on success and failure alike.
	waiter <- true
}
// Compiles a single ASL file and writes the resulting SQF file into the
// output directory, creating the directory structure as needed. Intended to
// run as a goroutine started by compile.
//
// Completion is signalled on waiter exactly once per invocation, by the
// deferred recoverCompileError, which fires on every exit path (success,
// early return, or panic from the tokenizer/parser). This function must NOT
// signal waiter itself: the previous extra "waiter <- true" at the end meant
// two signals per successful file, which could unblock compile's wait loop
// early and let the program exit before all files were written.
func compileFile(path string, file ASLFile, waiter chan bool) {
	defer recoverCompileError(file.in, waiter)

	out := filepath.FromSlash(path + PathSeparator + file.out + PathSeparator + file.newname + sqfextension)
	fmt.Println(file.in + " -> " + out)

	// read file
	code, err := ioutil.ReadFile(file.in)
	if err != nil {
		fmt.Println("Error reading file: " + file.in)
		return
	}

	// compile
	token := tokenizer.Tokenize(code, false)
	compiler := parser.Compiler{}
	sqf := compiler.Parse(token, pretty)

	// write result, mirroring the input directory structure
	os.MkdirAll(filepath.FromSlash(path+PathSeparator+file.out), 0777)
	err = ioutil.WriteFile(out, []byte(sqf), 0666)
	if err != nil {
		fmt.Println("Error writing file: " + file.out)
		fmt.Println(err)
	}
}
// Compiles ASL files concurrently.
func compile(path string) { func compile(path string) {
waiter := make(chan bool, len(aslFiles))
// fire compile
for i := 0; i < len(aslFiles); i++ { for i := 0; i < len(aslFiles); i++ {
out := filepath.FromSlash(path + PathSeparator + aslFiles[i].out + PathSeparator + aslFiles[i].newname + sqfextension) go compileFile(path, aslFiles[i], waiter)
fmt.Println(aslFiles[i].in + " -> " + out) }
code, err := ioutil.ReadFile(aslFiles[i].in)
if err != nil { // wait until all files are compiled
fmt.Println("Error reading file: " + aslFiles[i].in) for i := 0; i < len(aslFiles); i++ {
continue <-waiter
}
token := tokenizer.Tokenize(code, false)
compiler := parser.Compiler{}
sqf := compiler.Parse(token, pretty)
os.MkdirAll(filepath.FromSlash(path+PathSeparator+aslFiles[i].out), 0777)
err = ioutil.WriteFile(out, []byte(sqf), 0666)
if err != nil {
fmt.Println("Error writing file: " + aslFiles[i].out)
fmt.Println(err)
}
} }
} }

View File

@@ -1,6 +1,7 @@
package parser package parser
import ( import (
"errors"
"strconv" "strconv"
"tokenizer" "tokenizer"
) )
@@ -38,7 +39,7 @@ func (c *Compiler) accept(token string) bool {
// Throws if current token does not match expected one. // Throws if current token does not match expected one.
func (c *Compiler) expect(token string) { func (c *Compiler) expect(token string) {
if !c.tokenEqual(token, c.get()) { if !c.tokenEqual(token, c.get()) {
panic("Parse error, expected '" + token + "' but was '" + c.get().Token + "' in line " + strconv.Itoa(c.get().Line) + " at " + strconv.Itoa(c.get().Column)) panic(errors.New("Parse error, expected '" + token + "' but was '" + c.get().Token + "' in line " + strconv.Itoa(c.get().Line) + " at " + strconv.Itoa(c.get().Column)))
} }
c.next() c.next()
@@ -62,7 +63,7 @@ func (c *Compiler) next() {
// Returns current token or throws, if no more tokens are available. // Returns current token or throws, if no more tokens are available.
func (c *Compiler) get() tokenizer.Token { func (c *Compiler) get() tokenizer.Token {
if c.tokenIndex >= len(c.tokens) { if c.tokenIndex >= len(c.tokens) {
panic("No more tokens") panic(errors.New("No more tokens"))
} }
return c.tokens[c.tokenIndex] return c.tokens[c.tokenIndex]