mirror of https://github.com/Kugelschieber/asl.git (synced 2026-01-18 12:00:25 +00:00)

go fmt.
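The hunks below are the output of running the standard Go formatter over the repository: imports are sorted alphabetically, binary operators such as + get a space on each side, redundant semicolons are dropped, and an empty loop body is moved onto its own line. As a side note (not part of the commit itself), the same canonical formatting can be reproduced programmatically with the standard library's go/format package; a minimal sketch, using an illustrative snippet written in the pre-commit style:

package main

import (
	"fmt"
	"go/format"
	"log"
)

func main() {
	// A small file in the pre-commit style: no spaces around "+"
	// and a redundant trailing semicolon.
	src := []byte(`package main

import "fmt"

var version = "1.0"

func main() {
	fmt.Println("asl version "+version);
}
`)

	// format.Source parses the source and reprints it in canonical gofmt
	// style, the same normalization "go fmt" applies on disk.
	out, err := format.Source(src)
	if err != nil {
		log.Fatal(err)
	}

	fmt.Print(string(out))
}

Printing the result shows the semicolon removed and the concatenation spaced as "asl version " + version, matching the hunks below.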
@@ -1,13 +1,13 @@
 package main
 
 import (
-	"parser"
-	"tokenizer"
 	"fmt"
 	"io/ioutil"
-	"path/filepath"
 	"os"
+	"parser"
+	"path/filepath"
 	"strings"
+	"tokenizer"
 )
 
 const (
@@ -46,7 +46,7 @@ func flags(flag string) bool {
 
 	if flag[0] == '-' {
 		if flag == "-v" {
-			fmt.Println("asl version "+version)
+			fmt.Println("asl version " + version)
 			exit = true
 		} else if flag == "-r" {
 			recursive = true
@@ -75,13 +75,13 @@ func readAslFiles(path string) {
 		name := dir[i].Name()
 
 		if dir[i].IsDir() && recursive {
-			readAslFiles(filepath.FromSlash(path+PathSeparator+name))
+			readAslFiles(filepath.FromSlash(path + PathSeparator + name))
 			continue
 		}
 
 		if !dir[i].IsDir() && strings.ToLower(filepath.Ext(name)) == extension {
-			in := filepath.FromSlash(path+PathSeparator+dir[i].Name())
-			out := filepath.FromSlash("./"+path[len(inDir):len(path)])
+			in := filepath.FromSlash(path + PathSeparator + dir[i].Name())
+			out := filepath.FromSlash("./" + path[len(inDir):len(path)])
 			newname := name[:len(name)-len(filepath.Ext(name))]
 
 			file := ASLFile{in, out, newname}
@@ -92,12 +92,12 @@ func readAslFiles(path string) {
 
 func compile(path string) {
 	for i := 0; i < len(aslFiles); i++ {
-		out := filepath.FromSlash(path+PathSeparator+aslFiles[i].out+PathSeparator+aslFiles[i].newname+sqfextension)
-		fmt.Println(aslFiles[i].in+" -> "+out)
+		out := filepath.FromSlash(path + PathSeparator + aslFiles[i].out + PathSeparator + aslFiles[i].newname + sqfextension)
+		fmt.Println(aslFiles[i].in + " -> " + out)
 		code, err := ioutil.ReadFile(aslFiles[i].in)
 
 		if err != nil {
-			fmt.Println("Error reading file: "+aslFiles[i].in)
+			fmt.Println("Error reading file: " + aslFiles[i].in)
 			continue
 		}
 
@@ -109,7 +109,7 @@ func compile(path string) {
 		err = ioutil.WriteFile(out, []byte(sqf), 0666)
 
 		if err != nil {
-			fmt.Println("Error writing file: "+aslFiles[i].out)
+			fmt.Println("Error writing file: " + aslFiles[i].out)
 			fmt.Println(err)
 		}
 	}
@@ -125,7 +125,8 @@ func main() {
 	}
 
 	var i int
-	for i = 1; i < len(args) && flags(args[i]); i++ {}
+	for i = 1; i < len(args) && flags(args[i]); i++ {
+	}
 
 	if exit {
 		return

@@ -87,7 +87,7 @@ func (c *Compiler) parseArray(out bool) string {
 
 		for c.accept(",") {
 			c.next()
-			output += ","+c.parseExpression(false)
+			output += "," + c.parseExpression(false)
 		}
 	}
 
@@ -314,7 +314,7 @@ func (c *Compiler) parseInlineCode() string {
 
 	if len(code) > 2 {
 		compiler := Compiler{}
-		output = "{"+compiler.Parse(tokenizer.Tokenize([]byte(code[1:len(code)-1]), true), false)+"}"
+		output = "{" + compiler.Parse(tokenizer.Tokenize([]byte(code[1:len(code)-1]), true), false) + "}"
 	}
 
 	c.expect(")")
@@ -467,10 +467,10 @@ func (c *Compiler) parseIdentifier() string {
 	} else if c.accept("[") {
 		output += c.parseArray(false)
 	} else if c.seek("[") {
-		output += "("+c.get().Token
+		output += "(" + c.get().Token
 		c.next()
 		c.expect("[")
-		output += " select ("+c.parseExpression(false)+"))"
+		output += " select (" + c.parseExpression(false) + "))"
 		c.expect("]")
 	} else if c.accept("!") || c.accept("-") {
 		output = c.get().Token

@@ -38,7 +38,7 @@ func (c *Compiler) accept(token string) bool {
 // Throws if current token does not match expected one.
 func (c *Compiler) expect(token string) {
 	if !c.tokenEqual(token, c.get()) {
-		panic("Parse error, expected '" + token + "' but was '" + c.get().Token + "' in line "+strconv.Itoa(c.get().Line)+" at "+strconv.Itoa(c.get().Column))
+		panic("Parse error, expected '" + token + "' but was '" + c.get().Token + "' in line " + strconv.Itoa(c.get().Line) + " at " + strconv.Itoa(c.get().Column))
 	}
 
 	c.next()

@@ -1,10 +1,10 @@
 package parser_test
 
 import (
-	"tokenizer"
-	"parser"
 	"io/ioutil"
+	"parser"
 	"testing"
+	"tokenizer"
 )
 
 func TestParserDeclaration(t *testing.T) {

@@ -60,7 +60,7 @@ var new_line = []byte{'\r', '\n'}
 // which can be parsed later.
 func Tokenize(code []byte, doStripSlashes bool) []Token {
 	if doStripSlashes {
-		code = stripSlashes(code);
+		code = stripSlashes(code)
 	}
 
 	code = removeComments(code)

@@ -1,9 +1,9 @@
 package tokenizer_test
 
 import (
-	"tokenizer"
 	"io/ioutil"
 	"testing"
+	"tokenizer"
 )
 
 func TestTokenizerVar(t *testing.T) {