mirror of
https://github.com/Kugelschieber/asl.git
synced 2026-01-18 03:50:25 +00:00
Started testing with tokenizer.
This commit is contained in:
@@ -1,7 +1 @@
|
||||
func foo(){
|
||||
return 1;
|
||||
}
|
||||
|
||||
var _x = foo();
|
||||
|
||||
foo();
|
||||
var x = foo();
|
||||
|
||||
@@ -312,7 +312,7 @@ func parseExpression(out bool) string {
|
||||
} else if accept(")") {
|
||||
openingBrackets--
|
||||
}
|
||||
|
||||
|
||||
next()
|
||||
}
|
||||
|
||||
|
||||
@@ -4,6 +4,7 @@ var tokens []Token
|
||||
var tokenIndex int
|
||||
var out string
|
||||
var offset int
|
||||
//var pretty bool
|
||||
|
||||
// Initializes the parser.
|
||||
func initParser(token []Token) {
|
||||
@@ -33,6 +34,16 @@ func expect(token string) {
|
||||
next()
|
||||
}
|
||||
|
||||
// Returns true, if the next token matches expected one.
|
||||
// Does not throw parse errors and checks if token is available.
|
||||
func seek(token string) bool {
|
||||
if tokenIndex+1 >= len(tokens) {
|
||||
return false
|
||||
}
|
||||
|
||||
return tokenEqual(token, tokens[tokenIndex+1])
|
||||
}
|
||||
|
||||
// Increases token counter, so that the next token is compared.
|
||||
func next() {
|
||||
tokenIndex++
|
||||
|
||||
@@ -1,7 +1,6 @@
|
||||
package asl
|
||||
|
||||
import (
|
||||
"fmt"
|
||||
"strings"
|
||||
)
|
||||
|
||||
@@ -49,8 +48,6 @@ func Tokenize(code []byte) []Token {
|
||||
tokens := make([]Token, 0)
|
||||
token, mask, isstring := "", false, false
|
||||
|
||||
fmt.Println("CODE:\n"+string(code)) // TODO: remove
|
||||
|
||||
for i := range code {
|
||||
c := code[i]
|
||||
|
||||
@@ -89,11 +86,6 @@ func Tokenize(code []byte) []Token {
|
||||
|
||||
mask = false
|
||||
}
|
||||
|
||||
fmt.Println("TOKENS:") // TODO: remove
|
||||
for t := range tokens {
|
||||
fmt.Println(tokens[t].token)
|
||||
}
|
||||
|
||||
return tokens
|
||||
}
|
||||
|
||||
95
src/asl/tokenizer_test.go
Normal file
95
src/asl/tokenizer_test.go
Normal file
@@ -0,0 +1,95 @@
|
||||
package asl
|
||||
|
||||
import (
|
||||
"testing"
|
||||
"io/ioutil"
|
||||
)
|
||||
|
||||
func TestVar(t *testing.T) {
|
||||
got := getTokens(t, "test/tokenizer_var.asl")
|
||||
want := []string{"var", "x", "=", "1", ";"}
|
||||
|
||||
compareLength(t, &got, &want)
|
||||
compareTokens(t, &got, &want)
|
||||
}
|
||||
|
||||
func TestIf(t *testing.T) {
|
||||
got := getTokens(t, "test/tokenizer_if.asl")
|
||||
want := []string{"if", "a", "<", "b", "{", "}"}
|
||||
|
||||
compareLength(t, &got, &want)
|
||||
compareTokens(t, &got, &want)
|
||||
}
|
||||
|
||||
func TestWhile(t *testing.T) {
|
||||
got := getTokens(t, "test/tokenizer_while.asl")
|
||||
want := []string{"while", "true", "{", "}"}
|
||||
|
||||
compareLength(t, &got, &want)
|
||||
compareTokens(t, &got, &want)
|
||||
}
|
||||
|
||||
//for var i = 0; i < 100; i = i+1 {
|
||||
//}
|
||||
func TestFor(t *testing.T) {
|
||||
got := getTokens(t, "test/tokenizer_for.asl")
|
||||
want := []string{"for", "var", "i", "=", "0", ";", "i", "<", "100", ";", "i", "=", "i+1", "{", "}"}
|
||||
|
||||
compareLength(t, &got, &want)
|
||||
compareTokens(t, &got, &want)
|
||||
}
|
||||
|
||||
func TestEach(t *testing.T) {
|
||||
got := getTokens(t, "test/tokenizer_each.asl")
|
||||
want := []string{"each", "allUnits", "{", "}"}
|
||||
|
||||
compareLength(t, &got, &want)
|
||||
compareTokens(t, &got, &want)
|
||||
}
|
||||
|
||||
func TestFunction(t *testing.T) {
|
||||
got := getTokens(t, "test/tokenizer_func.asl")
|
||||
want := []string{"func", "TestFunction", "(", "param0", ",", "param1", ")", "{", "return", "true", ";", "}"}
|
||||
|
||||
compareLength(t, &got, &want)
|
||||
compareTokens(t, &got, &want)
|
||||
}
|
||||
|
||||
func compareLength(t *testing.T, got *[]Token, want *[]string) {
|
||||
if len(*got) != len(*want) {
|
||||
t.Error("Length of tokens got and expected tokens not equal, was:")
|
||||
gotlist, wantlist := "", ""
|
||||
|
||||
for i := range *got {
|
||||
gotlist += (*got)[i].token+" "
|
||||
}
|
||||
|
||||
for i := range *want {
|
||||
wantlist += (*want)[i]+" "
|
||||
}
|
||||
|
||||
t.Log(gotlist)
|
||||
t.Log("expected:")
|
||||
t.Log(wantlist)
|
||||
t.FailNow()
|
||||
}
|
||||
}
|
||||
|
||||
func compareTokens(t *testing.T, got *[]Token, want *[]string) {
|
||||
for i := range *got {
|
||||
if (*got)[i].token != (*want)[i] {
|
||||
t.Error("Tokens do not match: "+(*got)[i].token+" != "+(*want)[i])
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
func getTokens(t *testing.T, file string) []Token {
|
||||
code, err := ioutil.ReadFile(file)
|
||||
|
||||
if err != nil {
|
||||
t.Error("Could not read test file: "+file)
|
||||
t.FailNow()
|
||||
}
|
||||
|
||||
return Tokenize(code)
|
||||
}
|
||||
3
test/tokenizer_each.asl
Normal file
3
test/tokenizer_each.asl
Normal file
@@ -0,0 +1,3 @@
|
||||
each allUnits {
|
||||
// ...
|
||||
}
|
||||
3
test/tokenizer_for.asl
Normal file
3
test/tokenizer_for.asl
Normal file
@@ -0,0 +1,3 @@
|
||||
for var i = 0; i < 100; i = i+1 {
|
||||
// ...
|
||||
}
|
||||
3
test/tokenizer_func.asl
Normal file
3
test/tokenizer_func.asl
Normal file
@@ -0,0 +1,3 @@
|
||||
func TestFunction(param0, param1) {
|
||||
return true;
|
||||
}
|
||||
3
test/tokenizer_if.asl
Normal file
3
test/tokenizer_if.asl
Normal file
@@ -0,0 +1,3 @@
|
||||
if a < b {
|
||||
// ...
|
||||
}
|
||||
9
test/tokenizer_var.asl
Normal file
9
test/tokenizer_var.asl
Normal file
@@ -0,0 +1,9 @@
|
||||
// single line comment
|
||||
|
||||
/*
|
||||
multi
|
||||
line
|
||||
comment
|
||||
*/
|
||||
|
||||
var x = 1;
|
||||
3
test/tokenizer_while.asl
Normal file
3
test/tokenizer_while.asl
Normal file
@@ -0,0 +1,3 @@
|
||||
while true {
|
||||
// ...
|
||||
}
|
||||
Reference in New Issue
Block a user