From 276ab866680cdc021bd3c70edd22fe2451a81231 Mon Sep 17 00:00:00 2001 From: Marvin Blum Date: Sat, 31 Oct 2015 16:20:04 +0100 Subject: [PATCH] Added preprocessor line. --- src/parser/parser.go | 12 ++++++++- src/parser/parser_test.go | 7 +++++ src/tokenizer/tokenizer.go | 47 ++++++++++++++++++++++++++++----- src/tokenizer/tokenizer_test.go | 8 ++++++ test/tokenizer_preprocessor.asl | 2 ++ 5 files changed, 69 insertions(+), 7 deletions(-) create mode 100644 test/tokenizer_preprocessor.asl diff --git a/src/parser/parser.go b/src/parser/parser.go index fb5c068..d5eb3f9 100644 --- a/src/parser/parser.go +++ b/src/parser/parser.go @@ -4,6 +4,8 @@ import ( "tokenizer" ) +const new_line = "\r\n" + // Parses tokens, validates code to a specific degree // and writes SQF code into desired location. func (c *Compiler) Parse(token []tokenizer.Token, prettyPrinting bool) string { @@ -19,7 +21,9 @@ func (c *Compiler) Parse(token []tokenizer.Token, prettyPrinting bool) string { } func (c *Compiler) parseBlock() { - if c.accept("var") { + if c.get().Preprocessor { + c.parsePreprocessor() + } else if c.accept("var") { c.parseVar() } else if c.accept("if") { c.parseIf() @@ -52,6 +56,12 @@ func (c *Compiler) parseBlock() { } } +func (c *Compiler) parsePreprocessor() { + // we definitely want a new line here + c.appendOut(c.get().Token+new_line, false) + c.next() +} + func (c *Compiler) parseVar() { c.expect("var") c.appendOut(c.get().Token, false) diff --git a/src/parser/parser_test.go b/src/parser/parser_test.go index a6d2c0a..8926855 100644 --- a/src/parser/parser_test.go +++ b/src/parser/parser_test.go @@ -154,6 +154,13 @@ func TestParserInlineCode(t *testing.T) { equal(t, got, want) } +func TestParserPreprocessor(t *testing.T) { + got := getCompiled(t, "test/tokenizer_preprocessor.asl") + want := "#define HELLO_WORLD \"Hello World!\"\r\nhint HELLO_WORLD;\r\n" + + equal(t, got, want) +} + func getCompiled(t *testing.T, file string) string { code, err := ioutil.ReadFile(file) diff 
--git a/src/tokenizer/tokenizer.go b/src/tokenizer/tokenizer.go index e34c1d2..e36f559 100644 --- a/src/tokenizer/tokenizer.go +++ b/src/tokenizer/tokenizer.go @@ -6,6 +6,7 @@ import ( type Token struct { Token string + Preprocessor bool } var delimiter = []byte{ @@ -50,6 +51,8 @@ var keywords = []string{ var whitespace = []byte{' ', '\n', '\t', '\r'} var identifier = "abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ0123456789_" +var preprocessor = byte('#') +var new_line = []byte{'\r', '\n'} // Tokenizes the given byte array into syntax tokens, // which can be parsed later. @@ -58,7 +61,7 @@ func Tokenize(code []byte) []Token { tokens := make([]Token, 0) token, mask, isstring := "", false, false - for i := range code { + for i := 0; i < len(code); i++ { c := code[i] // string masks (backslash) @@ -78,16 +81,19 @@ func Tokenize(code []byte) []Token { if isstring { token += string(c) } else { - // delimeter, keyword or variable/expression - if byteArrayContains(delimiter, c) { + // preprocessor, delimiter, keyword or variable/expression + if c == preprocessor { + tokens = append(tokens, preprocessorLine(code, &i)) + token = "" + } else if byteArrayContains(delimiter, c) { if token != "" { - tokens = append(tokens, Token{token}) + tokens = append(tokens, Token{token, false}) } - tokens = append(tokens, Token{string(c)}) + tokens = append(tokens, Token{string(c), false}) token = "" } else if stringArrayContains(strings.ToLower(token)) && !isIdentifierCharacter(c) { - tokens = append(tokens, Token{token}) + tokens = append(tokens, Token{token, false}) token = "" } else if !byteArrayContains(whitespace, c) { token += string(c) @@ -138,6 +144,35 @@ func removeComments(code []byte) []byte { return newcode[:j] } +// Reads preprocessor command until end of line +func preprocessorLine(code []byte, i *int) Token { + c := byte('0') + var line string + + for *i < len(code) { + c = code[*i] + + if byteArrayContains(new_line, c) { + break + } + + line += string(c) + (*i)++ + 
} + + // read all new line characters (\r and \n) + c = code[*i] + + for byteArrayContains(new_line, c) { + (*i)++ + c = code[*i] + } + + (*i)-- // for will count up 1, so subtract it here + + return Token{line, true} +} + // Returns the next character in code starting at i. // If no character is left, '0' will be returned. func nextChar(code []byte, i int) byte { diff --git a/src/tokenizer/tokenizer_test.go b/src/tokenizer/tokenizer_test.go index 7b6ce82..8794073 100644 --- a/src/tokenizer/tokenizer_test.go +++ b/src/tokenizer/tokenizer_test.go @@ -86,6 +86,14 @@ func TestTokenizerInlineCode(t *testing.T) { compareTokens(t, &got, &want) } +func TestTokenizerPreprocessor(t *testing.T) { + got := getTokens(t, "test/tokenizer_preprocessor.asl") + want := []string{"#define HELLO_WORLD \"Hello World!\"", "hint", "(", ")", "(", "HELLO_WORLD", ")", ";"} + + compareLength(t, &got, &want) + compareTokens(t, &got, &want) +} + func compareLength(t *testing.T, got *[]tokenizer.Token, want *[]string) { if len(*got) != len(*want) { t.Error("Length of tokens got and expected tokens not equal, was:") diff --git a/test/tokenizer_preprocessor.asl b/test/tokenizer_preprocessor.asl new file mode 100644 index 0000000..d029d85 --- /dev/null +++ b/test/tokenizer_preprocessor.asl @@ -0,0 +1,2 @@ +#define HELLO_WORLD "Hello World!" +hint()(HELLO_WORLD);