Mirror of https://github.com/Kugelschieber/asl.git, synced 2026-01-18 12:00:25 +00:00.
Issue #15.
@@ -1 +1,3 @@
 var inline_code = code("var a = 1;var b = 2;if a < b {foo();}");
+func myFunc(a, b) {
+}
@@ -1,7 +1,8 @@
-package main
+package asl

 import (
-    "asl"
+    "parser"
+    "tokenizer"
     "fmt"
     "io/ioutil"
     "path/filepath"
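Aside: "parser" and "tokenizer" are unqualified, non-standard-library import paths, so this layout presumably relies on the repository's source directories being on the GOPATH (the pre-modules convention); under Go modules these imports would need a module path prefix.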
@@ -100,8 +101,8 @@ func compile(path string) {
            continue
        }

-       token := asl.Tokenize(code)
-       compiler := asl.Compiler{}
+       token := tokenizer.Tokenize(code)
+       compiler := parser.Compiler{}
        sqf := compiler.Parse(token, pretty)

        os.MkdirAll(filepath.FromSlash(path+PathSeparator+aslFiles[i].out), 0777)
@@ -1,12 +1,12 @@
-package asl
+package parser

 import (
    "tokenizer"
 )

 // Parses tokens, validates code to a specific degree
 // and writes SQF code into desired location.
-func (c *Compiler) Parse(token []Token, prettyPrinting bool) string {
+func (c *Compiler) Parse(token []tokenizer.Token, prettyPrinting bool) string {
    if !c.initParser(token, prettyPrinting) {
        return ""
    }
@@ -54,7 +54,7 @@ func (c *Compiler) parseBlock() {

 func (c *Compiler) parseVar() {
    c.expect("var")
-   c.appendOut(c.get().token, false)
+   c.appendOut(c.get().Token, false)
    c.next()

    if c.accept("=") {
@@ -199,7 +199,7 @@ func (c *Compiler) parseForeach() {

 func (c *Compiler) parseFunction() {
    c.expect("func")
-   c.appendOut(c.get().token+" = {", true)
+   c.appendOut(c.get().Token+" = {", true)
    c.next()
    c.expect("(")
    c.parseFunctionParameter()
@@ -219,12 +219,12 @@ func (c *Compiler) parseFunctionParameter() {
    c.appendOut("params [", false)

    for !c.accept(")") {
-       name := c.get().token
+       name := c.get().Token
        c.next()

        if c.accept("=") {
            c.next()
-           value := c.get().token
+           value := c.get().Token
            c.next()
            c.appendOut("[\""+name+"\","+value+"]", false)
        } else {
@@ -292,13 +292,13 @@ func (c *Compiler) parseInlineCode() string {
    c.expect("code")
    c.expect("(")

-   code := c.get().token
+   code := c.get().Token
    c.next()
    output := "{}"

    if len(code) > 2 {
        compiler := Compiler{}
-       output = "{"+compiler.Parse(Tokenize([]byte(code[1:len(code)-1])), false)+"}"
+       output = "{"+compiler.Parse(tokenizer.Tokenize([]byte(code[1:len(code)-1])), false)+"}"
    }

    c.expect(")")
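Note the recursion here: parseInlineCode strips the surrounding quotes from the literal (code[1:len(code)-1]), runs the full tokenizer and parser over the inner source with pretty printing disabled, and wraps the result in braces, while an empty literal falls through to the default "{}". An expression like code("foo();") should therefore compile to an SQF block of the shape {foo();} (illustrative; not taken from the test suite).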
@@ -314,7 +314,7 @@ func (c *Compiler) parseStatement() {
    }

    // variable or function name
-   name := c.get().token
+   name := c.get().Token
    c.next()

    if c.accept("=") {
@@ -445,21 +445,21 @@ func (c *Compiler) parseIdentifier() string {
    if c.accept("code") {
        output += c.parseInlineCode()
    } else if c.seek("(") && !c.accept("!") && !c.accept("-") {
-       name := c.get().token
+       name := c.get().Token
        c.next()
        output = "(" + c.parseFunctionCall(false, name) + ")"
    } else if c.seek("[") {
-       output += "("+c.get().token
+       output += "("+c.get().Token
        c.next()
        c.expect("[")
        output += " select ("+c.parseExpression(false)+"))"
        c.expect("]")
    } else if c.accept("!") || c.accept("-") {
-       output = c.get().token
+       output = c.get().Token
        c.next()
        output += c.parseTerm()
    } else {
-       output = c.get().token
+       output = c.get().Token
        c.next()
    }

@@ -1,7 +1,11 @@
-package asl
+package parser
+
+import (
+   "tokenizer"
+)

 type Compiler struct {
-   tokens []Token
+   tokens []tokenizer.Token
    tokenIndex int
    out string
    offset int
@@ -9,7 +13,7 @@ type Compiler struct {
 }

 // Initilizes the parser.
-func (c *Compiler) initParser(token []Token, prettyPrinting bool) bool {
+func (c *Compiler) initParser(token []tokenizer.Token, prettyPrinting bool) bool {
    if len(token) == 0 {
        return false
    }
@@ -33,7 +37,7 @@ func (c *Compiler) accept(token string) bool {
 // Throws if current token does not match expected one.
 func (c *Compiler) expect(token string) {
    if !c.tokenEqual(token, c.get()) {
-       panic("Parse error, expected '" + token + "' but was '" + c.get().token + "'")
+       panic("Parse error, expected '" + token + "' but was '" + c.get().Token + "'")
    }

    c.next()
@@ -55,7 +59,7 @@ func (c *Compiler) next() {
 }

 // Returns current token or throws, if no more tokens are available.
-func (c *Compiler) get() Token {
+func (c *Compiler) get() tokenizer.Token {
    if c.tokenIndex >= len(c.tokens) {
        panic("No more tokens")
    }
@@ -69,8 +73,8 @@ func (c *Compiler) end() bool {
 }

 // Checks if two strings match.
-func (c *Compiler) tokenEqual(a string, b Token) bool {
-   return a == b.token
+func (c *Compiler) tokenEqual(a string, b tokenizer.Token) bool {
+   return a == b.Token
 }

 // Appends the output string to current SQF code output.
@@ -1,7 +1,8 @@
-package asl_test
+package parser_test

 import (
-   "asl"
+   "tokenizer"
+   "parser"
    "io/ioutil"
    "testing"
 )
@@ -161,8 +162,8 @@ func getCompiled(t *testing.T, file string) string {
        t.FailNow()
    }

-   tokens := asl.Tokenize(code)
-   compiler := asl.Compiler{}
+   tokens := tokenizer.Tokenize(code)
+   compiler := parser.Compiler{}

    return compiler.Parse(tokens, true)
 }
@@ -1,11 +1,11 @@
-package asl
+package tokenizer

 import (
    "strings"
 )

 type Token struct {
-   token string
+   Token string
 }

 var delimiter = []byte{
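The rename from token to Token is the reason for every .token → .Token change above: with the parser now in a separate package, only exported (capitalized) identifiers are visible across the package boundary, so the field had to be exported for parser to read it.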
@@ -1,6 +1,7 @@
-package asl
+package tokenizer_test

 import (
+   "tokenizer"
    "io/ioutil"
    "testing"
 )
@@ -85,13 +86,13 @@ func TestTokenizerInlineCode(t *testing.T) {
    compareTokens(t, &got, &want)
 }

-func compareLength(t *testing.T, got *[]Token, want *[]string) {
+func compareLength(t *testing.T, got *[]tokenizer.Token, want *[]string) {
    if len(*got) != len(*want) {
        t.Error("Length of tokens got and expected tokens not equal, was:")
        gotlist, wantlist := "", ""

        for i := range *got {
-           gotlist += (*got)[i].token + " "
+           gotlist += (*got)[i].Token + " "
        }

        for i := range *want {
@@ -105,15 +106,15 @@ func compareLength(t *testing.T, got *[]Token, want *[]string) {
    }
 }

-func compareTokens(t *testing.T, got *[]Token, want *[]string) {
+func compareTokens(t *testing.T, got *[]tokenizer.Token, want *[]string) {
    for i := range *got {
-       if (*got)[i].token != (*want)[i] {
-           t.Error("Tokens do not match: " + (*got)[i].token + " != " + (*want)[i])
+       if (*got)[i].Token != (*want)[i] {
+           t.Error("Tokens do not match: " + (*got)[i].Token + " != " + (*want)[i])
        }
    }
 }

-func getTokens(t *testing.T, file string) []Token {
+func getTokens(t *testing.T, file string) []tokenizer.Token {
    code, err := ioutil.ReadFile(file)

    if err != nil {
@@ -121,5 +122,5 @@ func getTokens(t *testing.T, file string) []Token {
        t.FailNow()
    }

-   return Tokenize(code)
+   return tokenizer.Tokenize(code)
 }
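Taken together, the commit moves the tokenizer and the parser into their own packages and exports the Token field they share. A minimal sketch of the resulting call sequence, assuming the same GOPATH-style import paths used in the diff (the ASL input string is made up for illustration):

package main

import (
    "fmt"
    "parser"
    "tokenizer"
)

func main() {
    // Any source the tokenizer accepts works here; this input is hypothetical.
    code := []byte("var a = 1;")

    // Tokenize first, then hand the token slice to the parser.
    tokens := tokenizer.Tokenize(code)
    compiler := parser.Compiler{}

    // The second argument enables pretty printing, as in compile() above.
    sqf := compiler.Parse(tokens, true)
    fmt.Println(sqf)
}

This mirrors the two-step sequence in compile() and in the getCompiled() test helper.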