Added strings to tokenizer, added comments to code.

This commit is contained in:
Marvin Blum
2015-10-09 11:29:52 +02:00
parent 25d691eb66
commit e88a6b48a8
6 changed files with 71 additions and 17 deletions

2
.gitignore vendored Normal file
View File

@@ -0,0 +1,2 @@
/bin/
/pkg/

3
in/test.asl Normal file
View File

@@ -0,0 +1,3 @@
var _x = "This is a string!";
var _y = "";
var _z = "\"mask\"";

View File

@@ -6,6 +6,8 @@ import (
const TAB = " "
// Parses tokens, validates code to a specific degree
// and writes SQF code into desired location.
func Parse(token []Token) string {
initParser(token)

View File

@@ -5,6 +5,7 @@ var tokenIndex int
var out string
var offset int
// Initializes the parser.
func initParser(token []Token) {
if len(token) == 0 {
panic("No tokens provided")
@@ -16,10 +17,14 @@ func initParser(token []Token) {
offset = 0
}
// accept reports whether the current token matches the expected one.
// It never panics: when no tokens remain it simply returns false,
// making it safe to use as a lookahead probe.
func accept(token string) bool {
	if tokenIndex >= len(tokens) {
		return false
	}
	return tokenEqual(token, get())
}
// Hard version of "accept".
// Throws if current token does not match expected one.
func expect(token string) {
if !tokenEqual(token, get()) {
panic("Parse error, expected '" + token + "' but was '" + get().token + "'")
@@ -28,10 +33,12 @@ func expect(token string) {
next()
}
// next advances the parser by one token, so subsequent
// accept/expect calls compare against the following token.
func next() {
	tokenIndex += 1
}
// Returns current token or throws, if no more tokens are available.
func get() Token {
if tokenIndex >= len(tokens) {
panic("No more tokens")
@@ -40,14 +47,17 @@ func get() Token {
return tokens[tokenIndex]
}
// Returns true if the end of input code was reached.
func end() bool {
return tokenIndex == len(tokens)
}
// tokenEqual reports whether the token's text equals the given string.
func tokenEqual(a string, b Token) bool {
	return b.token == a
}
// appendOut concatenates str onto the generated SQF output buffer.
func appendOut(str string) {
	out = out + str
}

View File

@@ -42,16 +42,36 @@ var keywords = []string{
var whitespace = []byte{' ', '\n', '\t'}
// Tokenizes the given byte array into syntax tokens,
// which can be parsed later.
func Tokenize(code []byte) []Token {
code = removeComments(code)
tokens := make([]Token, 0)
token := ""
token, mask, isstring := "", false, false
fmt.Println(string(code))
fmt.Println("CODE:\n"+string(code)) // TODO: remove
for i := range code {
c := code[i]
// string masks (backslash)
if c == '\\' && !mask {
token += "\\"
mask = true
continue
}
// string
if c == '"' && !mask {
token += "\""
isstring = !isstring
continue
}
if isstring {
token += string(c)
} else {
// delimiter, keyword or variable/expression
if byteArrayContains(delimiter, c) {
if token != "" {
tokens = append(tokens, Token{token})
@@ -67,9 +87,20 @@ func Tokenize(code []byte) []Token {
}
}
mask = false
}
fmt.Println("TOKENS:") // TODO: remove
for t := range tokens {
fmt.Println(tokens[t].token)
}
return tokens
}
// Removes all comments from input byte array.
// Comments are single line comments, starting with // (two slashes),
// multi line comments with /* ... */ (slash star, star slash).
func removeComments(code []byte) []byte {
newcode := make([]byte, len(code))
j := 0
@@ -92,6 +123,8 @@ func removeComments(code []byte) []byte {
return newcode[:j]
}
// Returns the next character in code starting at i.
// If no character is left, '0' will be returned.
func nextChar(code []byte, i int) byte {
i++
@@ -102,6 +135,7 @@ func nextChar(code []byte, i int) byte {
return '0'
}
// Used to skip a line if a single line comment was found.
func skipSingleLineComment(code []byte, i int) int {
for i < len(code) && code[i] != '\n' {
i++
@@ -110,6 +144,7 @@ func skipSingleLineComment(code []byte, i int) int {
return i
}
// Used to skip a block of characters if a multi line comment was found
func skipMultiLineComment(code []byte, i int) int {
for i < len(code) && !(code[i] == '*' && nextChar(code, i) == '/') {
i++
@@ -118,6 +153,7 @@ func skipMultiLineComment(code []byte, i int) int {
return i + 1
}
// Checks if a byte array (string) contains a delimiter.
func byteArrayContains(haystack []byte, needle byte) bool {
for i := range haystack {
if haystack[i] == needle {
@@ -128,6 +164,7 @@ func byteArrayContains(haystack []byte, needle byte) bool {
return false
}
// Checks if a string array contains the given string.
func stringArrayContains(haystack []string, needle string) bool {
for i := range haystack {
if haystack[i] == needle {

View File

@@ -8,9 +8,9 @@ import (
func main() {
// read test file
code, _ := ioutil.ReadFile("in/simple.asl")
code, _ := ioutil.ReadFile("in/test.asl")
token := asl.Tokenize(code)
out := asl.Parse(token)
fmt.Println(out)
fmt.Println("OUTPUT:\n"+out) // TODO: remove
}