diff --git a/.gitignore b/.gitignore
deleted file mode 100644
index 82de707..0000000
--- a/.gitignore
+++ /dev/null
@@ -1,4 +0,0 @@
-/bin/
-/pkg/
-/out/
-/in/
diff --git a/.project b/.project
deleted file mode 100644
index 70e02e7..0000000
--- a/.project
+++ /dev/null
@@ -1,17 +0,0 @@
-<?xml version="1.0" encoding="UTF-8"?>
-<projectDescription>
-	<name>asl</name>
-	<comment></comment>
-	<projects>
-	</projects>
-	<buildSpec>
-		<buildCommand>
-			<name>com.googlecode.goclipse.goBuilder</name>
-			<arguments>
-			</arguments>
-		</buildCommand>
-	</buildSpec>
-	<natures>
-		<nature>com.googlecode.goclipse.core.goNature</nature>
-	</natures>
-</projectDescription>
diff --git a/src/main/asl.go b/asl.go
similarity index 92%
rename from src/main/asl.go
rename to asl.go
index 656a3c0..8da9cc0 100644
--- a/src/main/asl.go
+++ b/asl.go
@@ -1,8 +1,6 @@
package main
import (
- "parser"
- "tokenizer"
"fmt"
"io/ioutil"
"path/filepath"
@@ -15,6 +13,7 @@ const (
extension = ".asl"
sqfextension = ".sqf"
PathSeparator = string(os.PathSeparator)
+ new_line = "\r\n"
)
type ASLFile struct {
@@ -43,7 +42,7 @@ func usage() {
func flags(flag string) bool {
flag = strings.ToLower(flag)
-
+
if flag[0] == '-' {
if flag == "-v" {
fmt.Println("asl version "+version)
@@ -56,24 +55,24 @@ func flags(flag string) bool {
usage()
exit = true
}
-
+
return true
}
-
+
return false
}
func readAslFiles(path string) {
dir, err := ioutil.ReadDir(path)
-
+
if err != nil {
fmt.Println("Error reading in directory!")
return
}
-
+
for i := 0; i < len(dir); i++ {
name := dir[i].Name()
-
+
if dir[i].IsDir() && recursive {
readAslFiles(filepath.FromSlash(path+PathSeparator+name))
continue
@@ -82,8 +81,8 @@ func readAslFiles(path string) {
if !dir[i].IsDir() && strings.ToLower(filepath.Ext(name)) == extension {
in := filepath.FromSlash(path+PathSeparator+dir[i].Name())
out := filepath.FromSlash("./"+path[len(inDir):len(path)])
- newname := name[:len(name)-len(filepath.Ext(name))]
-
+ newname := name[:len(name)-len(filepath.Ext(name))]
+
file := ASLFile{in, out, newname}
aslFiles = append(aslFiles, file)
}
@@ -95,19 +94,19 @@ func compile(path string) {
out := filepath.FromSlash(path+PathSeparator+aslFiles[i].out+PathSeparator+aslFiles[i].newname+sqfextension)
fmt.Println(aslFiles[i].in+" -> "+out)
code, err := ioutil.ReadFile(aslFiles[i].in)
-
+
if err != nil {
fmt.Println("Error reading file: "+aslFiles[i].in)
continue
}
-
- token := tokenizer.Tokenize(code)
- compiler := parser.Compiler{}
+
+ token := Tokenize(code)
+ compiler := Compiler{}
sqf := compiler.Parse(token, pretty)
-
+
os.MkdirAll(filepath.FromSlash(path+PathSeparator+aslFiles[i].out), 0777)
err = ioutil.WriteFile(out, []byte(sqf), 0666)
-
+
if err != nil {
fmt.Println("Error writing file: "+aslFiles[i].out)
fmt.Println(err)
@@ -117,34 +116,34 @@ func compile(path string) {
func main() {
args := os.Args
-
+
// flags
if len(args) < 2 {
usage()
return
}
-
+
var i int
for i = 1; i < len(args) && flags(args[i]); i++ {}
-
+
if exit {
return
}
-
+
// in/out parameter
out := ""
-
+
if i < len(args) {
inDir = args[i]
i++
} else {
return
}
-
+
if i < len(args) {
out = args[i]
}
-
+
readAslFiles(inDir)
compile(out)
}
diff --git a/asl_test.go b/asl_test.go
new file mode 100644
index 0000000..2ad3456
--- /dev/null
+++ b/asl_test.go
@@ -0,0 +1,327 @@
+package main
+
+import (
+ "io/ioutil"
+ "testing"
+)
+
+type Want struct {
+ tokens []string
+ parser string
+}
+
+type Got struct {
+ tokens []Token
+ parser string
+}
+
+func TestArray(t *testing.T) {
+ got := getCompiled(t, "test/array.asl")
+ want := &Want{
+ []string{"var","x","=","[","1",",","2",",","3","]",";","var","y","=","x","[","1","]",";"},
+ "x = [1,2,3];\r\ny = (x select (1));\r\n",
+ }
+
+ equal(t, got, want)
+}
+
+func TestAssignResult(t *testing.T) {
+ got := getCompiled(t, "test/assign_result.asl")
+ want := &Want{
+ []string{"var", "x", "=", "foo", "(", "1", ",", "2", ",", "3", ")", ";", "y", "=", "bar", "(", "1", ",", "2", ",", "3", ")", ";"},
+ "x = ([1, 2, 3] call foo);\r\ny = ([1, 2, 3] call bar);\r\n",
+ }
+
+ equal(t, got, want)
+}
+
+func TestAssignment(t *testing.T) {
+ got := getCompiled(t, "test/assignment.asl")
+ want := &Want{
+ []string{"x", "=", "1", ";"},
+ "x = 1;\r\n",
+ }
+
+ equal(t, got, want)
+}
+
+func TestBuildinFunctionCall(t *testing.T) {
+ got := getCompiled(t, "test/buildin_func.asl")
+ want := &Want{
+ []string{"var","_x","=","setHit","(","getVar","(","player",",","foo",")","(","bar",")",")","(","\"head\"",",","\"tail\"",")",";"},
+ "_x = (([player, foo] getVar bar) setHit [\"head\", \"tail\"]);\r\n",
+ }
+
+ equal(t, got, want)
+}
+
+func TestCode(t *testing.T) {
+ got := getCompiled(t, "test/code.asl")
+ want := &Want{
+ []string{"var", "x", "=", "code", "(", "\"var x = 5;\"", ")", ";"},
+ "x = {x = 5;};\r\n",
+ }
+
+ equal(t, got, want)
+}
+
+func TestComment(t *testing.T) {
+ got := getCompiled(t, "test/comment.asl")
+ want := &Want{
+ []string{"var","x","=","1",";"},
+ "x = 1;\r\n",
+ }
+
+ equal(t, got, want)
+}
+
+
+func TestExitWith(t *testing.T) {
+ got := getCompiled(t, "test/exitwith.asl")
+ want := &Want{
+ []string{"exitwith","{","}"},
+ "if (true) exitWith {\r\n};\r\n",
+ }
+
+ equal(t, got, want)
+}
+
+func TestExpression1(t *testing.T) {
+ got := getCompiled(t, "test/expression1.asl")
+ want := &Want{
+ []string{"x","=","(","(","1","+","2","+","3",")","*","4","/","2",")","+","foo","(","1",",","2",",","3",")",";"},
+ "x = ((1+2+3)*4/2)+([1, 2, 3] call foo);\r\n",
+ }
+ equal(t, got, want)
+}
+
+func TestExpression2(t *testing.T) {
+ got := getCompiled(t, "test/expression2.asl")
+ want := &Want{
+ []string{"var","x","=","true","|","|","(","3",">","=","4","&","&","5","<","8",")",";"},
+ "x = true||(3>=4&&5<8);\r\n",
+ }
+
+ equal(t, got, want)
+}
+
+func TestExpression3(t *testing.T) {
+ got := getCompiled(t, "test/expression3.asl")
+ want := &Want{
+ []string{"var","x","=","-","(","1","+","(","2","+","3",")",")","/","(","6","*","(","someVariable","+","99","-","100",")",")","-","(","20",")","+","!","anotherVariable","+","foo","(",")",";"},
+ "x = -(1+(2+3))/(6*(someVariable+99-100))-(20)+!anotherVariable+([] call foo);\r\n",
+ }
+
+ equal(t, got, want)
+}
+
+func TestFor(t *testing.T) {
+ got := getCompiled(t, "test/for.asl")
+ want := &Want{
+ []string{"for","var","i","=","0",";","i","<","100",";","i","=","i","+","1","{","}"},
+ "for [{i=0}, {i<100}, {i=i+1}] do {\r\n};\r\n",
+ }
+
+ equal(t, got, want)
+}
+
+func TestForeach(t *testing.T) {
+ got := getCompiled(t, "test/foreach.asl")
+ want := &Want{
+ []string{"foreach", "unit", "=", ">", "allUnits", "{", "}"},
+ "{\r\nunit = _x;\r\n} forEach (allUnits);\r\n",
+ }
+
+ equal(t, got, want)
+}
+
+func TestFunction(t *testing.T) {
+ got := getCompiled(t, "test/func.asl")
+ want := &Want{
+ []string{"func", "TestFunction", "(", "param0", ",", "param1", ")", "{", "return", "true", ";", "}"},
+ "TestFunction = {\r\nparams [\"param0\",\"param1\"];\r\nreturn true;\r\n};\r\n",
+ }
+
+ equal(t, got, want)
+}
+
+func TestFunctionCall(t *testing.T) {
+ got := getCompiled(t, "test/func_call.asl")
+ want := &Want{
+ []string{"func","myFunc","(","a",",","b",")","{","return","a",">","b",";","}","myFunc","(","1","+","3","/","4",",","2","-","(","66","*","22",")","/","3","-","(","(","123",")",")",")",";"},
+ "myFunc = {\r\nparams [\"a\",\"b\"];\r\nreturn a>b;\r\n};\r\n[1+3/4, 2-(66*22)/3-((123))] call myFunc;\r\n",
+ }
+
+ equal(t, got, want)
+}
+
+func TestFunctionParams(t *testing.T) {
+ got := getCompiled(t, "test/func_params.asl")
+ want := &Want{
+ []string{"func","myFunc","(","a","=","1",",","b","=","2",")","{","return","a","+","b",";","}"},
+ "myFunc = {\r\nparams [[\"a\",1],[\"b\",2]];\r\nreturn a+b;\r\n};\r\n",
+ }
+
+ equal(t, got, want)
+}
+
+func TestIdentifier(t *testing.T) {
+ got := getCompiled(t, "test/identifier.asl")
+ want := &Want{
+ []string{"var","format","=","\"should not be for mat!\"",";"},
+ "format = \"should not be for mat!\";\r\n",
+ }
+
+ equal(t, got, want)
+}
+
+func TestIf(t *testing.T) {
+ got := getCompiled(t, "test/if.asl")
+ want := &Want{
+ []string{"if","a","<","b","{","}"},
+ "if (a","=","y","&","&","x","<","y","&","&","x",">","y","{","}"},
+ "if (x==y&&x!=y&&x<=y&&x>=y&&xy) then {\r\n};\r\n",
+ }
+
+
+ equal(t, got, want)
+}
+
+func TestPreprocessor(t *testing.T) {
+ got := getCompiled(t, "test/preprocessor.asl")
+ want := &Want{
+ []string{"#define HELLO_WORLD \"Hello World!\"", "hint", "(", ")", "(", "HELLO_WORLD", ")", ";"},
+ "\r\n#define HELLO_WORLD \"Hello World!\"\r\nhint HELLO_WORLD;\r\n",
+ }
+
+ equal(t, got, want)
+}
+
+func TestSwitch(t *testing.T) {
+ got := getCompiled(t, "test/switch.asl")
+ want := &Want{
+ []string{"switch","x","{","case","1",":","x","=","1",";","case","2",":","x","=","2",";","default",":","x","=","3",";","}"},
+ "switch (x) do {\r\ncase 1:\r\n{\r\nx = 1;\r\n};\r\ncase 2:\r\n{\r\nx = 2;\r\n};\r\ndefault:\r\n{\r\nx = 3;\r\n};\r\n};\r\n",
+ }
+
+ equal(t, got, want)
+}
+
+func TestTryCatch(t *testing.T) {
+ got := getCompiled(t, "test/try_catch.asl")
+ want := &Want{
+ []string{"try","{","}","catch","{","}"},
+ "try {\r\n} catch {\r\n};\r\n",
+ }
+ equal(t, got, want)
+}
+
+func TestWaitUntil(t *testing.T) {
+ got := getCompiled(t, "test/waituntil.asl")
+ want := &Want{
+ []string{"waituntil","(","x","=","x","+","1",";","x","<","100",")",";"},
+ "waitUntil {x=x+1;x<100};\r\n",
+ }
+
+ equal(t, got, want)
+}
+
+
+func TestWhile(t *testing.T) {
+ got := getCompiled(t, "test/while.asl")
+ want := &Want{
+ []string{"while", "true", "{", "}"},
+ "while {true} do {\r\n};",
+ }
+
+ equal(t, got, want)
+}
+
+func getCompiled(t *testing.T, file string) *Got {
+ code, err := ioutil.ReadFile(file)
+
+ if err != nil {
+ t.Error("Could not read test file: " + file)
+ t.FailNow()
+ }
+
+ tokens := Tokenize(code)
+ compiler := Compiler{}
+ parsed := compiler.Parse(tokens, true)
+
+ got := &Got{tokens, parsed}
+
+ return got
+}
+
+func compareLength(t *testing.T, got *Got, want *Want) {
+ if len(got.tokens) != len(want.tokens) {
+ t.Error("Length of tokens got and expected tokens not equal, was:")
+ gotlist, wantlist := "", ""
+
+ for i := range got.tokens {
+ gotlist += (got.tokens)[i].Token + " "
+ }
+
+ for i := range want.tokens {
+ wantlist += (want.tokens)[i] + " "
+ }
+
+ t.Log(gotlist)
+ t.Log("expected:")
+ t.Log(wantlist)
+ t.FailNow()
+ }
+
+
+}
+
+func compareTokens(t *testing.T, got *Got, want *Want) {
+ for i := range got.tokens {
+ if (got.tokens)[i].Token != (want.tokens)[i] {
+ t.Error("Tokens do not match: " + (got.tokens)[i].Token + " != " + (want.tokens)[i])
+ }
+ }
+}
+
+func equal(t *testing.T, got *Got, want *Want) {
+ compareLength(t, got, want)
+ compareTokens(t, got, want)
+
+ if got.parser != want.parser {
+ t.Error("Parsed does not equal, got:")
+ t.Log(got.parser)
+ t.Log("expected:")
+ t.Log(want.parser)
+ t.FailNow()
+ }
+}
diff --git a/src/parser/parser.go b/parser.go
similarity index 97%
rename from src/parser/parser.go
rename to parser.go
index 3e58b2a..5fc9358 100644
--- a/src/parser/parser.go
+++ b/parser.go
@@ -1,14 +1,8 @@
-package parser
-
-import (
- "tokenizer"
-)
-
-const new_line = "\r\n"
+package main
// Parses tokens, validates code to a specific degree
// and writes SQF code into desired location.
-func (c *Compiler) Parse(token []tokenizer.Token, prettyPrinting bool) string {
+func (c *Compiler) Parse(token []Token, prettyPrinting bool) string {
if !c.initParser(token, prettyPrinting) {
return ""
}
@@ -230,13 +224,13 @@ func (c *Compiler) parseFunctionParameter() {
if c.accept("{") {
return
}
-
+
c.appendOut("params [", false)
for !c.accept(")") {
name := c.get().Token
c.next()
-
+
if c.accept("=") {
c.next()
value := c.get().Token
@@ -251,7 +245,7 @@ func (c *Compiler) parseFunctionParameter() {
c.appendOut(",", false)
}
}
-
+
c.appendOut("];", true)
}
@@ -291,13 +285,13 @@ func (c *Compiler) parseWaitUntil() {
c.expect("(")
c.appendOut("waitUntil {", false)
c.parseExpression(true)
-
+
if c.accept(";") {
c.next()
c.appendOut(";", false)
c.parseExpression(true)
}
-
+
c.expect(")")
c.expect(";")
c.appendOut("};", true)
@@ -306,18 +300,18 @@ func (c *Compiler) parseWaitUntil() {
func (c *Compiler) parseInlineCode() string {
c.expect("code")
c.expect("(")
-
+
code := c.get().Token
c.next()
output := "{}"
-
+
if len(code) > 2 {
compiler := Compiler{}
- output = "{"+compiler.Parse(tokenizer.Tokenize([]byte(code[1:len(code)-1])), false)+"}"
+ output = "{"+compiler.Parse(Tokenize([]byte(code[1:len(code)-1])), false)+"}"
}
-
+
c.expect(")")
-
+
return output
}
diff --git a/src/parser/parserHelper.go b/parserHelper.go
similarity index 87%
rename from src/parser/parserHelper.go
rename to parserHelper.go
index 298158b..9882d05 100644
--- a/src/parser/parserHelper.go
+++ b/parserHelper.go
@@ -1,12 +1,11 @@
-package parser
+package main
import (
"strconv"
- "tokenizer"
)
type Compiler struct {
- tokens []tokenizer.Token
+ tokens []Token
tokenIndex int
out string
offset int
@@ -14,7 +13,7 @@ type Compiler struct {
}
// Initializes the parser.
-func (c *Compiler) initParser(token []tokenizer.Token, prettyPrinting bool) bool {
+func (c *Compiler) initParser(token []Token, prettyPrinting bool) bool {
if len(token) == 0 {
return false
}
@@ -24,7 +23,7 @@ func (c *Compiler) initParser(token []tokenizer.Token, prettyPrinting bool) bool
c.out = ""
c.offset = 0
c.pretty = prettyPrinting
-
+
return true
}
@@ -60,7 +59,7 @@ func (c *Compiler) next() {
}
// Returns current token or throws, if no more tokens are available.
-func (c *Compiler) get() tokenizer.Token {
+func (c *Compiler) get() Token {
if c.tokenIndex >= len(c.tokens) {
panic("No more tokens")
}
@@ -74,7 +73,7 @@ func (c *Compiler) end() bool {
}
// Checks if two strings match.
-func (c *Compiler) tokenEqual(a string, b tokenizer.Token) bool {
+func (c *Compiler) tokenEqual(a string, b Token) bool {
return a == b.Token
}
diff --git a/src/parser/parser_test.go b/src/parser/parser_test.go
deleted file mode 100644
index 443fa44..0000000
--- a/src/parser/parser_test.go
+++ /dev/null
@@ -1,186 +0,0 @@
-package parser_test
-
-import (
- "tokenizer"
- "parser"
- "io/ioutil"
- "testing"
-)
-
-func TestParserDeclaration(t *testing.T) {
- got := getCompiled(t, "test/tokenizer_var.asl")
- want := "x = 1;\r\narray = [1,2,3];\r\n"
-
- equal(t, got, want)
-}
-
-func TestParserAssignment(t *testing.T) {
- got := getCompiled(t, "test/parser_assignment.asl")
- want := "x = 1;\r\n"
-
- equal(t, got, want)
-}
-
-func TestParserIf(t *testing.T) {
- got := getCompiled(t, "test/tokenizer_if.asl")
- want := "if (ab;\r\n};\r\n[1+3/4, 2-(66*22)/3-((123))] call myFunc;\r\n"
-
- equal(t, got, want)
-}
-
-func TestParserBuildinFunctionCall(t *testing.T) {
- got := getCompiled(t, "test/parser_buildin_func.asl")
- want := "_x = (([player, foo] getVar bar) setHit [\"head\", \"tail\"]);\r\n"
-
- equal(t, got, want)
-}
-
-func TestParserOperator(t *testing.T) {
- got := getCompiled(t, "test/parser_operator.asl")
- want := "if (x==y&&x!=y&&x<=y&&x>=y&&xy) then {\r\n};\r\n"
-
- equal(t, got, want)
-}
-
-func TestParserTryCatch(t *testing.T) {
- got := getCompiled(t, "test/parser_try_catch.asl")
- want := "try {\r\n} catch {\r\n};\r\n"
-
- equal(t, got, want)
-}
-
-func TestParserNegationFunctionCall(t *testing.T) {
- got := getCompiled(t, "test/parser_negation.asl")
- want := "x = !([] call foo);\r\n"
-
- equal(t, got, want)
-}
-
-func TestParserExitWith(t *testing.T) {
- got := getCompiled(t, "test/parser_exitwith.asl")
- want := "if (true) exitWith {\r\n};\r\n"
-
- equal(t, got, want)
-}
-
-func TestParserWaitUntil(t *testing.T) {
- got := getCompiled(t, "test/parser_waituntil.asl")
- want := "waitUntil {x=x+1;x<100};\r\n"
-
- equal(t, got, want)
-}
-
-func TestParserArray(t *testing.T) {
- got := getCompiled(t, "test/parser_array.asl")
- want := "x = [1,2,3];\r\ny = (x select (1));\r\n"
-
- equal(t, got, want)
-}
-
-func TestParserFunctionParams(t *testing.T) {
- got := getCompiled(t, "test/parser_func_params.asl")
- want := "myFunc = {\r\nparams [[\"a\",1],[\"b\",2]];\r\nreturn a+b;\r\n};\r\n"
-
- equal(t, got, want)
-}
-
-func TestParserInlineCode(t *testing.T) {
- got := getCompiled(t, "test/parser_code.asl")
- want := "inline_code = {a = 1;b = 2;if (a", "allUnits", "{", "}"}
-
- compareLength(t, &got, &want)
- compareTokens(t, &got, &want)
-}
-
-func TestTokenizerSwitch(t *testing.T) {
- got := getTokens(t, "test/tokenizer_switch.asl")
- want := []string{"switch", "x", "{", "case", "1", ":", "x", "=", "1", ";", "case", "2", ":", "x", "=", "2", ";", "default", ":", "x", "=", "3", ";", "}"}
-
- compareLength(t, &got, &want)
- compareTokens(t, &got, &want)
-}
-
-func TestTokenizerFunction(t *testing.T) {
- got := getTokens(t, "test/tokenizer_func.asl")
- want := []string{"func", "TestFunction", "(", "param0", ",", "param1", ")", "{", "return", "true", ";", "}"}
-
- compareLength(t, &got, &want)
- compareTokens(t, &got, &want)
-}
-
-func TestTokenizerExpression(t *testing.T) {
- got := getTokens(t, "test/tokenizer_expr.asl")
- want := []string{"x", "=", "(", "(", "1", "+", "2", "+", "3", ")", "*", "4", "/", "2", ")", "+", "foo", "(", "1", ",", "2", ",", "3", ")", ";"}
-
- compareLength(t, &got, &want)
- compareTokens(t, &got, &want)
-}
-
-func TestTokenizerIdentifier(t *testing.T) {
- got := getTokens(t, "test/tokenizer_identifier.asl")
- want := []string{"var", "format", "=", "\"should not be for mat!\"", ";"}
-
- compareLength(t, &got, &want)
- compareTokens(t, &got, &want)
-}
-
-func TestTokenizerInlineCode(t *testing.T) {
- got := getTokens(t, "test/tokenizer_code.asl")
- want := []string{"var", "x", "=", "code", "(", "\"var x = 5;\"", ")", ";"}
-
- compareLength(t, &got, &want)
- compareTokens(t, &got, &want)
-}
-
-func TestTokenizerPreprocessor(t *testing.T) {
- got := getTokens(t, "test/tokenizer_preprocessor.asl")
- want := []string{"#define HELLO_WORLD \"Hello World!\"", "hint", "(", ")", "(", "HELLO_WORLD", ")", ";"}
-
- compareLength(t, &got, &want)
- compareTokens(t, &got, &want)
-}
-
-func compareLength(t *testing.T, got *[]tokenizer.Token, want *[]string) {
- if len(*got) != len(*want) {
- t.Error("Length of tokens got and expected tokens not equal, was:")
- gotlist, wantlist := "", ""
-
- for i := range *got {
- gotlist += (*got)[i].Token + " "
- }
-
- for i := range *want {
- wantlist += (*want)[i] + " "
- }
-
- t.Log(gotlist)
- t.Log("expected:")
- t.Log(wantlist)
- t.FailNow()
- }
-}
-
-func compareTokens(t *testing.T, got *[]tokenizer.Token, want *[]string) {
- for i := range *got {
- if (*got)[i].Token != (*want)[i] {
- t.Error("Tokens do not match: " + (*got)[i].Token + " != " + (*want)[i])
- }
- }
-}
-
-func getTokens(t *testing.T, file string) []tokenizer.Token {
- code, err := ioutil.ReadFile(file)
-
- if err != nil {
- t.Error("Could not read test file: " + file)
- t.FailNow()
- }
-
- return tokenizer.Tokenize(code)
-}
diff --git a/test/parser_array.asl b/test/array.asl
similarity index 100%
rename from test/parser_array.asl
rename to test/array.asl
diff --git a/test/parser_assign_result.asl b/test/assign_result.asl
similarity index 100%
rename from test/parser_assign_result.asl
rename to test/assign_result.asl
diff --git a/test/parser_assignment.asl b/test/assignment.asl
similarity index 100%
rename from test/parser_assignment.asl
rename to test/assignment.asl
diff --git a/test/parser_buildin_func.asl b/test/buildin_func.asl
similarity index 100%
rename from test/parser_buildin_func.asl
rename to test/buildin_func.asl
diff --git a/test/tokenizer_code.asl b/test/code.asl
similarity index 100%
rename from test/tokenizer_code.asl
rename to test/code.asl
diff --git a/test/tokenizer_var.asl b/test/comment.asl
similarity index 72%
rename from test/tokenizer_var.asl
rename to test/comment.asl
index 83528d8..a701b14 100644
--- a/test/tokenizer_var.asl
+++ b/test/comment.asl
@@ -7,4 +7,3 @@ comment
*/
var x = 1;
-var array = [1, 2, 3];
diff --git a/test/parser_exitwith.asl b/test/exitwith.asl
similarity index 100%
rename from test/parser_exitwith.asl
rename to test/exitwith.asl
diff --git a/test/tokenizer_expr.asl b/test/expression1.asl
similarity index 100%
rename from test/tokenizer_expr.asl
rename to test/expression1.asl
diff --git a/test/parser_expression2.asl b/test/expression2.asl
similarity index 100%
rename from test/parser_expression2.asl
rename to test/expression2.asl
diff --git a/test/parser_expression.asl b/test/expression3.asl
similarity index 100%
rename from test/parser_expression.asl
rename to test/expression3.asl
diff --git a/test/tokenizer_for.asl b/test/for.asl
similarity index 100%
rename from test/tokenizer_for.asl
rename to test/for.asl
diff --git a/test/tokenizer_foreach.asl b/test/foreach.asl
similarity index 100%
rename from test/tokenizer_foreach.asl
rename to test/foreach.asl
diff --git a/test/tokenizer_func.asl b/test/func.asl
similarity index 100%
rename from test/tokenizer_func.asl
rename to test/func.asl
diff --git a/test/parser_func_call.asl b/test/func_call.asl
similarity index 100%
rename from test/parser_func_call.asl
rename to test/func_call.asl
diff --git a/test/parser_func_params.asl b/test/func_params.asl
similarity index 100%
rename from test/parser_func_params.asl
rename to test/func_params.asl
diff --git a/test/tokenizer_identifier.asl b/test/identifier.asl
similarity index 100%
rename from test/tokenizer_identifier.asl
rename to test/identifier.asl
diff --git a/test/tokenizer_if.asl b/test/if.asl
similarity index 100%
rename from test/tokenizer_if.asl
rename to test/if.asl
diff --git a/test/parser_code.asl b/test/inline_code.asl
similarity index 100%
rename from test/parser_code.asl
rename to test/inline_code.asl
diff --git a/test/parser_negation.asl b/test/negation.asl
similarity index 100%
rename from test/parser_negation.asl
rename to test/negation.asl
diff --git a/test/parser_operator.asl b/test/operator.asl
similarity index 100%
rename from test/parser_operator.asl
rename to test/operator.asl
diff --git a/test/tokenizer_preprocessor.asl b/test/preprocessor.asl
similarity index 100%
rename from test/tokenizer_preprocessor.asl
rename to test/preprocessor.asl
diff --git a/test/tokenizer_switch.asl b/test/switch.asl
similarity index 100%
rename from test/tokenizer_switch.asl
rename to test/switch.asl
diff --git a/test/parser_try_catch.asl b/test/try_catch.asl
similarity index 100%
rename from test/parser_try_catch.asl
rename to test/try_catch.asl
diff --git a/test/parser_waituntil.asl b/test/waituntil.asl
similarity index 100%
rename from test/parser_waituntil.asl
rename to test/waituntil.asl
diff --git a/test/tokenizer_while.asl b/test/while.asl
similarity index 100%
rename from test/tokenizer_while.asl
rename to test/while.asl
diff --git a/src/tokenizer/tokenizer.go b/tokenizer.go
similarity index 95%
rename from src/tokenizer/tokenizer.go
rename to tokenizer.go
index 8b6c731..a08801e 100644
--- a/src/tokenizer/tokenizer.go
+++ b/tokenizer.go
@@ -1,4 +1,4 @@
-package tokenizer
+package main
import (
"strings"
@@ -55,7 +55,7 @@ var keywords = []string{
var whitespace = []byte{' ', '\n', '\t', '\r'}
var identifier = "abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ0123456789_"
var preprocessor = byte('#')
-var new_line = []byte{'\r', '\n'}
+var new_line_byte = []byte{'\r', '\n'}
// Tokenizes the given byte array into syntax tokens,
// which can be parsed later.
@@ -67,8 +67,8 @@ func Tokenize(code []byte) []Token {
for i := 0; i < len(code); i++ {
c := code[i]
column++
-
- if byteArrayContains(new_line, c) {
+
+ if byteArrayContains(new_line_byte, c) {
line++
column = 0
}
@@ -157,28 +157,28 @@ func removeComments(code []byte) []byte {
func preprocessorLine(code []byte, i *int, lineNr, column int) Token {
c := byte('0')
var line string
-
+
for *i < len(code) {
c = code[*i]
-
- if byteArrayContains(new_line, c) {
+
+ if byteArrayContains(new_line_byte, c) {
break
}
-
+
line += string(c)
(*i)++
}
-
+
// read all new line characters (\r and \n)
c = code[*i]
-
- for byteArrayContains(new_line, c) {
+
+ for byteArrayContains(new_line_byte, c) {
(*i)++
c = code[*i]
}
-
+
(*i)-- // for will count up 1, so subtract it here
-
+
return Token{line, true, lineNr, column}
}