commit 6e4cef91bc
parent ae8fad1abc
Author: Marvin Blum
Date:   2015-10-30 19:03:50 +01:00

7 changed files with 49 additions and 40 deletions

View File

@@ -1 +1,3 @@
 var inline_code = code("var a = 1;var b = 2;if a < b {foo();}");
+func myFunc(a, b) {
+}

View File

@@ -1,7 +1,8 @@
-package main
+package asl

 import (
-    "asl"
+    "parser"
+    "tokenizer"
     "fmt"
     "io/ioutil"
     "path/filepath"
@@ -100,8 +101,8 @@ func compile(path string) {
             continue
         }

-        token := asl.Tokenize(code)
-        compiler := asl.Compiler{}
+        token := tokenizer.Tokenize(code)
+        compiler := parser.Compiler{}
         sqf := compiler.Parse(token, pretty)
         os.MkdirAll(filepath.FromSlash(path+PathSeparator+aslFiles[i].out), 0777)
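
Taken together, these two hunks change compile() from a single asl package over to a tokenizer/parser pair. A minimal sketch of the resulting pipeline, assuming the GOPATH-style flat import paths from the diff; the input file name and error handling are illustrative, not part of the commit:

    package main

    import (
        "fmt"
        "io/ioutil"

        "parser"
        "tokenizer"
    )

    func main() {
        // Read an ASL source file (the path is illustrative).
        code, err := ioutil.ReadFile("test.asl")
        if err != nil {
            panic(err)
        }

        // Step 1: lexing now lives in its own package.
        tokens := tokenizer.Tokenize(code)

        // Step 2: parsing and SQF code generation live in package parser.
        compiler := parser.Compiler{}
        sqf := compiler.Parse(tokens, true) // true enables pretty printing

        fmt.Println(sqf)
    }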

View File

@@ -1,12 +1,12 @@
-package asl
+package parser

 import (
+    "tokenizer"
 )

 // Parses tokens, validates code to a specific degree
 // and writes SQF code into desired location.
-func (c *Compiler) Parse(token []Token, prettyPrinting bool) string {
+func (c *Compiler) Parse(token []tokenizer.Token, prettyPrinting bool) string {
     if !c.initParser(token, prettyPrinting) {
         return ""
     }
@@ -54,7 +54,7 @@ func (c *Compiler) parseBlock() {
 func (c *Compiler) parseVar() {
     c.expect("var")
-    c.appendOut(c.get().token, false)
+    c.appendOut(c.get().Token, false)
     c.next()

     if c.accept("=") {
@@ -199,7 +199,7 @@ func (c *Compiler) parseForeach() {
 func (c *Compiler) parseFunction() {
     c.expect("func")
-    c.appendOut(c.get().token+" = {", true)
+    c.appendOut(c.get().Token+" = {", true)
     c.next()
     c.expect("(")
     c.parseFunctionParameter()
@@ -219,12 +219,12 @@ func (c *Compiler) parseFunctionParameter() {
     c.appendOut("params [", false)

     for !c.accept(")") {
-        name := c.get().token
+        name := c.get().Token
         c.next()

         if c.accept("=") {
             c.next()
-            value := c.get().token
+            value := c.get().Token
             c.next()
             c.appendOut("[\""+name+"\","+value+"]", false)
         } else {
@@ -292,13 +292,13 @@ func (c *Compiler) parseInlineCode() string {
     c.expect("code")
     c.expect("(")
-    code := c.get().token
+    code := c.get().Token
     c.next()
     output := "{}"

     if len(code) > 2 {
         compiler := Compiler{}
-        output = "{"+compiler.Parse(Tokenize([]byte(code[1:len(code)-1])), false)+"}"
+        output = "{"+compiler.Parse(tokenizer.Tokenize([]byte(code[1:len(code)-1])), false)+"}"
     }

     c.expect(")")
@@ -314,7 +314,7 @@ func (c *Compiler) parseStatement() {
     }

     // variable or function name
-    name := c.get().token
+    name := c.get().Token
     c.next()

     if c.accept("=") {
@@ -445,21 +445,21 @@ func (c *Compiler) parseIdentifier() string {
     if c.accept("code") {
         output += c.parseInlineCode()
     } else if c.seek("(") && !c.accept("!") && !c.accept("-") {
-        name := c.get().token
+        name := c.get().Token
         c.next()
         output = "(" + c.parseFunctionCall(false, name) + ")"
     } else if c.seek("[") {
-        output += "("+c.get().token
+        output += "("+c.get().Token
         c.next()
         c.expect("[")
         output += " select ("+c.parseExpression(false)+"))"
         c.expect("]")
     } else if c.accept("!") || c.accept("-") {
-        output = c.get().token
+        output = c.get().Token
         c.next()
         output += c.parseTerm()
     } else {
-        output = c.get().token
+        output = c.get().Token
         c.next()
     }
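
parseInlineCode is the one place where the parser now calls back into the tokenizer package: the string inside code(...) is stripped of its surrounding quotes, re-tokenized, and compiled by a fresh nested Compiler, so the result is always a brace-wrapped SQF block. A sketch of that round trip, using the fixture from the first changed file; the exact SQF text it prints is not shown in the commit:

    package parser_test

    import (
        "fmt"

        "parser"
        "tokenizer"
    )

    // Example_inlineCode feeds the inline-code fixture through the new
    // two-package pipeline. The value assigned to inline_code comes out
    // as "{...}", because parseInlineCode wraps the nested compile
    // result in braces.
    func Example_inlineCode() {
        src := []byte(`var inline_code = code("var a = 1;var b = 2;if a < b {foo();}");`)

        compiler := parser.Compiler{}
        sqf := compiler.Parse(tokenizer.Tokenize(src), false)

        fmt.Println(sqf) // the exact SQF output is not shown in the commit
    }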

View File

@@ -1,7 +1,11 @@
-package asl
+package parser
+
+import (
+    "tokenizer"
+)

 type Compiler struct {
-    tokens []Token
+    tokens []tokenizer.Token
     tokenIndex int
     out string
     offset int
@@ -9,7 +13,7 @@ type Compiler struct {
 }

 // Initilizes the parser.
-func (c *Compiler) initParser(token []Token, prettyPrinting bool) bool {
+func (c *Compiler) initParser(token []tokenizer.Token, prettyPrinting bool) bool {
     if len(token) == 0 {
         return false
     }
@@ -33,7 +37,7 @@ func (c *Compiler) accept(token string) bool {
 // Throws if current token does not match expected one.
 func (c *Compiler) expect(token string) {
     if !c.tokenEqual(token, c.get()) {
-        panic("Parse error, expected '" + token + "' but was '" + c.get().token + "'")
+        panic("Parse error, expected '" + token + "' but was '" + c.get().Token + "'")
     }
     c.next()
@@ -55,7 +59,7 @@ func (c *Compiler) next() {
 }

 // Returns current token or throws, if no more tokens are available.
-func (c *Compiler) get() Token {
+func (c *Compiler) get() tokenizer.Token {
     if c.tokenIndex >= len(c.tokens) {
         panic("No more tokens")
     }
@@ -69,8 +73,8 @@ func (c *Compiler) end() bool {
 }

 // Checks if two strings match.
-func (c *Compiler) tokenEqual(a string, b Token) bool {
-    return a == b.token
+func (c *Compiler) tokenEqual(a string, b tokenizer.Token) bool {
+    return a == b.Token
 }

 // Appends the output string to current SQF code output.
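
get, next, accept, and expect form a classic recursive-descent token cursor. A condensed, self-contained sketch of the same pattern; the names mirror the diff, but this is illustrative code, not the package's:

    package main

    import "fmt"

    // Token mirrors tokenizer.Token after this commit: one exported field.
    type Token struct {
        Token string
    }

    // cursor reproduces the get/next/accept/expect pattern from the diff.
    type cursor struct {
        tokens     []Token
        tokenIndex int
    }

    // get returns the current token or panics when input is exhausted.
    func (c *cursor) get() Token {
        if c.tokenIndex >= len(c.tokens) {
            panic("No more tokens")
        }
        return c.tokens[c.tokenIndex]
    }

    // next advances the cursor by one token.
    func (c *cursor) next() {
        c.tokenIndex++
    }

    // accept reports whether the current token matches, without consuming it.
    func (c *cursor) accept(token string) bool {
        return c.tokenIndex < len(c.tokens) && c.get().Token == token
    }

    // expect consumes a matching token or panics with a parse error.
    func (c *cursor) expect(token string) {
        if !c.accept(token) {
            panic("Parse error, expected '" + token + "' but was '" + c.get().Token + "'")
        }
        c.next()
    }

    func main() {
        c := cursor{tokens: []Token{{"var"}, {"x"}}}
        c.expect("var")            // consumes "var"
        fmt.Println(c.get().Token) // prints: x
    }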

View File

@@ -1,7 +1,8 @@
-package asl_test
+package parser_test

 import (
-    "asl"
+    "tokenizer"
+    "parser"
     "io/ioutil"
     "testing"
 )
@@ -161,8 +162,8 @@ func getCompiled(t *testing.T, file string) string {
         t.FailNow()
     }

-    tokens := asl.Tokenize(code)
-    compiler := asl.Compiler{}
+    tokens := tokenizer.Tokenize(code)
+    compiler := parser.Compiler{}

     return compiler.Parse(tokens, true)
 }

View File

@@ -1,11 +1,11 @@
-package asl
+package tokenizer

 import (
     "strings"
 )

 type Token struct {
-    token string
+    Token string
 }

 var delimiter = []byte{
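
Renaming the field from token to Token is what makes the package split work at all: Go exports only identifiers that start with an upper-case letter, so the old unexported field was invisible outside the former asl package. A small illustration; firstToken is a hypothetical helper, not part of the commit:

    // On the tokenizer side (as in the diff):
    //
    //     type Token struct {
    //         Token string // upper-case first letter = exported
    //     }
    //
    // On the parser side, cross-package field access is now legal:
    package parser

    import "tokenizer"

    func firstToken(tokens []tokenizer.Token) string {
        // Compiles only because the field is now Token; the old
        // unexported field token was unreachable from here.
        return tokens[0].Token
    }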

View File

@@ -1,6 +1,7 @@
-package asl
+package tokenizer_test

 import (
+    "tokenizer"
     "io/ioutil"
     "testing"
 )
@@ -85,13 +86,13 @@ func TestTokenizerInlineCode(t *testing.T) {
     compareTokens(t, &got, &want)
 }

-func compareLength(t *testing.T, got *[]Token, want *[]string) {
+func compareLength(t *testing.T, got *[]tokenizer.Token, want *[]string) {
     if len(*got) != len(*want) {
         t.Error("Length of tokens got and expected tokens not equal, was:")
         gotlist, wantlist := "", ""

         for i := range *got {
-            gotlist += (*got)[i].token + " "
+            gotlist += (*got)[i].Token + " "
         }

         for i := range *want {
@@ -105,15 +106,15 @@ func compareLength(t *testing.T, got *[]Token, want *[]string) {
     }
 }

-func compareTokens(t *testing.T, got *[]Token, want *[]string) {
+func compareTokens(t *testing.T, got *[]tokenizer.Token, want *[]string) {
     for i := range *got {
-        if (*got)[i].token != (*want)[i] {
-            t.Error("Tokens do not match: " + (*got)[i].token + " != " + (*want)[i])
+        if (*got)[i].Token != (*want)[i] {
+            t.Error("Tokens do not match: " + (*got)[i].Token + " != " + (*want)[i])
         }
     }
 }

-func getTokens(t *testing.T, file string) []Token {
+func getTokens(t *testing.T, file string) []tokenizer.Token {
     code, err := ioutil.ReadFile(file)

     if err != nil {
@@ -121,5 +122,5 @@ func getTokens(t *testing.T, file string) []Token {
         t.FailNow()
     }

-    return Tokenize(code)
+    return tokenizer.Tokenize(code)
 }
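
With the exported field and the tokenizer import in place, a test in this file would read as follows. This is a hypothetical test in the style of the ones above; the expected token split is an assumption about the tokenizer, not something the commit shows:

    package tokenizer_test

    import (
        "testing"

        "tokenizer"
    )

    // TestTokenizeVar tokenizes a one-line declaration and checks the
    // result with the helpers updated in this commit.
    func TestTokenizeVar(t *testing.T) {
        got := tokenizer.Tokenize([]byte("var a = 1;"))
        want := []string{"var", "a", "=", "1", ";"}

        compareLength(t, &got, &want)
        compareTokens(t, &got, &want)
    }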