Basic parser, fixed bug in tokenizer.

Marvin Blum
2015-09-21 14:46:39 +02:00
parent 651c7451d1
commit 9754def486
6 changed files with 154 additions and 7 deletions

BIN
bin/main

Binary file not shown.


@@ -1,2 +1,9 @@
-var a = 1;
-var b = 2;
+var _a = 1;
+var _b = 2;
+
+if _a < _b {
+    var _x = 5;
+}
+else{
+    var _x = 6;
+}
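For orientation, a hand trace of the updated tokenizer (see the tokenizer.go hunks further down) suggests this input should tokenize as follows; the whitespace-handling branch is not visible in the hunks, so treat this as a sketch rather than captured output:

["var", "_a", "=", "1", ";", "var", "_b", "=", "2", ";",
 "if", "_a", "<", "_b", "{", "var", "_x", "=", "5", ";", "}",
 "else", "{", "var", "_x", "=", "6", ";", "}"]

"var" and "if" are flushed as keywords, and '=', ';', '{', '}', '<' are emitted as single-character delimiter tokens.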

Binary file not shown.


@@ -4,4 +4,126 @@ import (
 )
 
+const TAB = " "
+
+var tokens []Token
+var tokenIndex int
+var out string
+var offset int
+
+func Parse(token []Token) string {
+    initParser(token)
+
+    for tokenIndex < len(token) {
+        parseBlock()
+    }
+
+    return out
+}
+
+// parser functions
+
+func parseBlock() {
+    if get().token == "var" {
+        parseVar()
+    } else if get().token == "if" {
+        parseIf()
+    } else {
+        parseStatement()
+    }
+}
+
+func parseVar() {
+    expect("var")
+    appendOut(get().token)
+    next()
+
+    if accept("=") {
+        next()
+        appendOut(" = " + get().token)
+        next()
+    }
+
+    appendOut(";\n")
+    expect(";")
+}
+
+func parseIf() {
+    expect("if")
+    appendOut("if (")
+    parseCondition()
+    appendOut(") then {\n")
+    expect("{")
+    parseBlock()
+    expect("}")
+
+    if accept("else") {
+        next()
+        expect("{")
+        appendOut("} else {\n")
+        parseBlock()
+        expect("}")
+    }
+
+    appendOut("};")
+}
+
+func parseCondition() {
+    for get().token != "{" {
+        appendOut(get().token)
+        next()
+
+        if get().token != "{" {
+            appendOut(" ")
+        }
+    }
+}
+
+func parseStatement() {
+}
+
+// helper functions
+
+func initParser(token []Token) {
+    if len(token) == 0 {
+        panic("No tokens provided")
+    }
+
+    tokens = token
+    tokenIndex = 0
+    out = ""
+    offset = 0
+}
+
+func accept(token string) bool {
+    return tokenEqual(token, get())
+}
+
+func expect(token string) {
+    if !tokenEqual(token, get()) {
+        panic("Parse error, expected '" + token + "' but was '" + get().token + "'")
+    }
+
+    next()
+}
+
+func next() {
+    tokenIndex++
+}
+
+func get() Token {
+    if tokenIndex >= len(tokens) {
+        panic("No more tokens")
+    }
+
+    return tokens[tokenIndex]
+}
+
+func tokenEqual(a string, b Token) bool {
+    return a == b.token
+}
+
+func appendOut(str string) {
+    out += str
+}
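Tracing Parse over that token stream by hand, the transpiled output for in/simple.asl should come out roughly as below; note that TAB and offset are declared but never used yet, so nested statements are not indented. This is a hand trace under the assumptions above, not output captured from the commit:

_a = 1;
_b = 2;
if (_a < _b) then {
_x = 5;
} else {
_x = 6;
};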


@@ -8,8 +8,17 @@ type Token struct{
     token string
 }
 
-var delimiter = []byte{'=', ';'}
-var keywords = []string{"var"}
+var delimiter = []byte{'=',
+    ';',
+    '{',
+    '}',
+    '<',
+    '>',
+    '!'}
+var keywords = []string{"var",
+    "if"}
 
 var whitespace = []byte{' ', '\n', '\t'}
 
 func Tokenize(code []byte) []Token {
@@ -20,7 +29,10 @@ func Tokenize(code []byte) []Token {
         c := code[i]
 
         if byteArrayContains(delimiter, c) {
+            if token != "" {
                 tokens = append(tokens, Token{token})
+            }
+
             tokens = append(tokens, Token{string(c)})
             token = ""
         } else if stringArrayContains(keywords, token) {
@@ -32,9 +44,11 @@ func Tokenize(code []byte) []Token {
     }
 
     // TEST
+    fmt.Println("Tokens:")
     for i := range tokens {
         fmt.Println(tokens[i].token)
     }
+    fmt.Println("---")
 
     return tokens
 }
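The tokenizer fix is the new `if token != ""` guard: previously, a delimiter that directly followed whitespace appended an empty Token to the list. A minimal standalone sketch of the guarded flush, with illustrative names (flush, the inline switch) that are not from the repository:

package main

import "fmt"

// flush mirrors the guard added in the commit: the pending token is
// only emitted when it is non-empty. Without the guard, a delimiter
// that directly follows whitespace (e.g. the "=" in "a = 1;") would
// push an empty token onto the list.
func flush(tokens []string, token string) []string {
    if token != "" {
        tokens = append(tokens, token)
    }
    return tokens
}

func main() {
    var tokens []string
    token := ""

    for _, c := range []byte("a = 1;") {
        switch {
        case c == '=' || c == ';': // delimiter: flush pending token, then emit the delimiter itself
            tokens = flush(tokens, token)
            tokens = append(tokens, string(c))
            token = ""
        case c == ' ': // whitespace: flush pending token
            tokens = flush(tokens, token)
            token = ""
        default:
            token += string(c)
        }
    }
    tokens = flush(tokens, token)

    fmt.Println(tokens) // [a = 1 ;] -- no empty entries
}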


@@ -3,10 +3,14 @@ package main
 import (
     "io/ioutil"
     "asl"
+    "fmt"
 )
 
 func main(){
     // read test file
     code, _ := ioutil.ReadFile("in/simple.asl")
-    asl.Tokenize(code)
+    token := asl.Tokenize(code)
+    out := asl.Parse(token)
+
+    fmt.Println(out)
 }
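One aside: main still discards the error from ioutil.ReadFile, so a missing in/simple.asl would reach the parser as an empty token list and trip initParser's "No tokens provided" panic. A checked variant might look like this (illustrative, not part of the commit):

    code, err := ioutil.ReadFile("in/simple.asl")
    if err != nil {
        panic(err) // surface the real cause instead of the parser's "No tokens provided"
    }

    token := asl.Tokenize(code)
    fmt.Println(asl.Parse(token))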