From 4adc90d9a8e48a5c1841874cae0cb30c3deee6e1 Mon Sep 17 00:00:00 2001 From: Kyle Gunger Date: Sun, 20 Sep 2020 18:18:26 -0400 Subject: Basic updates - Move preprocessor to its own file - Begin working on new delimiters - Begin working on blocks in the tree --- src/tparse/tokenize.go | 34 +++++++++++++++++++++++++++------- 1 file changed, 27 insertions(+), 7 deletions(-) (limited to 'src/tparse/tokenize.go') diff --git a/src/tparse/tokenize.go b/src/tparse/tokenize.go index 79a0605..f5e3f54 100644 --- a/src/tparse/tokenize.go +++ b/src/tparse/tokenize.go @@ -126,12 +126,32 @@ func stripBlockComments(t []Token) []Token { out := []Token{} bc := false for _, tok := range t { - if tok.Type == DELIMIT && tok.Data == "/#" { - bc = true - continue - } else if tok.Type == DELIMIT && tok.Data == "#/" { - bc = false - continue + + if tok.Type == DELIMIT { + switch tok.Data { + case ";#": + out = append(out, Token{DELIMIT, ";/", tok.Line, tok.Char}) + bc = true + continue + case ":#": + out = append(out, Token{DELIMIT, ":/", tok.Line, tok.Char}) + bc = true + continue + case "/#": + bc = true + continue + case "#;": + out = append(out, Token{DELIMIT, "/;", tok.Line, tok.Char}) + bc = false + continue + case "#:": + out = append(out, Token{DELIMIT, "/:", tok.Line, tok.Char}) + bc = false + continue + case "#/": + bc = false + continue + } } else if bc { continue } @@ -158,7 +178,7 @@ func TokenizeFile(path string) []Token { max := maxResRunes() - ln, cn, last := int(0), int(-1), int(0) + ln, cn, last := int(1), int(-1), int(0) sp := false for r := rune(' '); ; r, _, err = read.ReadRune() { -- cgit v1.2.3