Added preprocessor line.

This commit is contained in:
Marvin Blum
2015-10-31 16:20:04 +01:00
parent dc4eaf4b0f
commit 276ab86668
5 changed files with 69 additions and 7 deletions

View File

@@ -4,6 +4,8 @@ import (
"tokenizer"
)
const new_line = "\r\n"
// Parses tokens, validates code to a specific degree
// and writes SQF code into desired location.
func (c *Compiler) Parse(token []tokenizer.Token, prettyPrinting bool) string {
@@ -19,7 +21,9 @@ func (c *Compiler) Parse(token []tokenizer.Token, prettyPrinting bool) string {
}
func (c *Compiler) parseBlock() {
if c.accept("var") {
if c.get().Preprocessor {
c.parsePreprocessor()
} else if c.accept("var") {
c.parseVar()
} else if c.accept("if") {
c.parseIf()
@@ -52,6 +56,12 @@ func (c *Compiler) parseBlock() {
}
}
// parsePreprocessor emits the current preprocessor token verbatim into the
// output. Preprocessor directives are line-based, so a line break is always
// appended regardless of the pretty-printing setting.
func (c *Compiler) parsePreprocessor() {
	directive := c.get().Token
	// directives must terminate with a line break to stay valid
	c.appendOut(directive+new_line, false)
	c.next()
}
func (c *Compiler) parseVar() {
c.expect("var")
c.appendOut(c.get().Token, false)

View File

@@ -154,6 +154,13 @@ func TestParserInlineCode(t *testing.T) {
equal(t, got, want)
}
// TestParserPreprocessor verifies that a #define directive is passed through
// to the compiled output verbatim and terminated with CRLF.
func TestParserPreprocessor(t *testing.T) {
	compiled := getCompiled(t, "test/tokenizer_preprocessor.asl")
	expected := "#define HELLO_WORLD \"Hello World!\"\r\nhint HELLO_WORLD;\r\n"
	equal(t, compiled, expected)
}
func getCompiled(t *testing.T, file string) string {
code, err := ioutil.ReadFile(file)

View File

@@ -6,6 +6,7 @@ import (
// Token is a single lexical unit produced by the tokenizer.
type Token struct {
	Token        string // the literal token text
	Preprocessor bool   // true if this token is an entire preprocessor line (e.g. "#define ...")
}
var delimiter = []byte{
@@ -50,6 +51,8 @@ var keywords = []string{
var whitespace = []byte{' ', '\n', '\t', '\r'}
var identifier = "abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ0123456789_"
var preprocessor = byte('#')
var new_line = []byte{'\r', '\n'}
// Tokenizes the given byte array into syntax tokens,
// which can be parsed later.
@@ -58,7 +61,7 @@ func Tokenize(code []byte) []Token {
tokens := make([]Token, 0)
token, mask, isstring := "", false, false
for i := range code {
for i := 0; i < len(code); i++ {
c := code[i]
// string masks (backslash)
@@ -78,16 +81,19 @@ func Tokenize(code []byte) []Token {
if isstring {
token += string(c)
} else {
// delimeter, keyword or variable/expression
if byteArrayContains(delimiter, c) {
// preprocessor, delimiter, keyword or variable/expression
if c == preprocessor {
tokens = append(tokens, preprocessorLine(code, &i))
token = ""
} else if byteArrayContains(delimiter, c) {
if token != "" {
tokens = append(tokens, Token{token})
tokens = append(tokens, Token{token, false})
}
tokens = append(tokens, Token{string(c)})
tokens = append(tokens, Token{string(c), false})
token = ""
} else if stringArrayContains(strings.ToLower(token)) && !isIdentifierCharacter(c) {
tokens = append(tokens, Token{token})
tokens = append(tokens, Token{token, false})
token = ""
} else if !byteArrayContains(whitespace, c) {
token += string(c)
@@ -138,6 +144,35 @@ func removeComments(code []byte) []byte {
return newcode[:j]
}
// preprocessorLine reads a preprocessor command starting at code[*i] up to
// (not including) the end of the line and returns it as a single Token with
// Preprocessor set to true.
//
// On return, *i has been advanced past the line and any trailing newline
// characters, then decremented by one so the caller's loop increment lands
// on the first character after the consumed line.
func preprocessorLine(code []byte, i *int) Token {
	start := *i

	// collect everything up to the line break (or end of input);
	// bounds check first so a directive on the last line cannot
	// index past the end of code
	for *i < len(code) && !byteArrayContains(new_line, code[*i]) {
		(*i)++
	}
	line := string(code[start:*i])

	// skip all consecutive newline characters (\r and \n), again
	// guarding against running off the end of the input
	for *i < len(code) && byteArrayContains(new_line, code[*i]) {
		(*i)++
	}

	(*i)-- // the caller's for loop will count up 1, so subtract it here
	return Token{line, true}
}
// Returns the next character in code starting at i.
// If no character is left, '0' will be returned.
func nextChar(code []byte, i int) byte {

View File

@@ -86,6 +86,14 @@ func TestTokenizerInlineCode(t *testing.T) {
compareTokens(t, &got, &want)
}
// TestTokenizerPreprocessor verifies that a #define line is tokenized as one
// single token and that the code following it is tokenized as usual.
func TestTokenizerPreprocessor(t *testing.T) {
	tokens := getTokens(t, "test/tokenizer_preprocessor.asl")
	expected := []string{"#define HELLO_WORLD \"Hello World!\"", "hint", "(", ")", "(", "HELLO_WORLD", ")", ";"}
	compareLength(t, &tokens, &expected)
	compareTokens(t, &tokens, &expected)
}
func compareLength(t *testing.T, got *[]tokenizer.Token, want *[]string) {
if len(*got) != len(*want) {
t.Error("Length of tokens got and expected tokens not equal, was:")