author    | Kyle Gunger <kgunger12@gmail.com> | 2023-08-10 18:29:59 -0400
committer | Kyle Gunger <kgunger12@gmail.com> | 2023-08-10 18:29:59 -0400
commit    | 0cc237617e4e853520b0509c02408d4170572d28 (patch)
tree      | 3714eb635f6c9cc12ba064f3281d867ac4ffba31
parent    | 9ab49df341025eca8e641ee539355edb15e0fff2 (diff)
Re-implement tokenizer functionality
-rw-r--r-- | tnslc/compiler.tnsl      | 57
-rw-r--r-- | tnslc/tnslc_wrapped.tnsl |  2
-rw-r--r-- | tnslc/tokenizer.tnsl     |  2
-rw-r--r-- | tnslc/vector.tnsl        |  7
4 files changed, 50 insertions, 18 deletions
diff --git a/tnslc/compiler.tnsl b/tnslc/compiler.tnsl
index eb77b22..b61ff05 100644
--- a/tnslc/compiler.tnsl
+++ b/tnslc/compiler.tnsl
@@ -100,26 +100,25 @@ struct Path {
 		~uint8 pth = _alloc(1)
 		pth{0} = 0
 
+		~uint8 w_ptr = self.split_path{0}
+
 		/; loop (int i = 0; i < self.path_count) [i++]
-			~uint8 w_ptr = self.split_path{i}
-
+			w_ptr = self.split_path{i}
 			int old_len = cstr_len(pth)
-			int new_len = cstr_len(w_ptr) + old_len
+			int new_len = old_len + cstr_len(w_ptr)
+
+			pth = _realloc(pth, new_len + 1)
+			pth{new_len} = 0
+
+			/; loop (int j = 0; j < cstr_len(w_ptr)) [j++]
+				pth{old_len + j} = w_ptr{j}
+			;/
+
 			/; if (i < self.path_count - 1)
 				pth = _realloc(pth, new_len + 2)
 				pth{new_len} = '/'
 				pth{new_len + 1} = 0
-				old_len++
-				new_len++
-			;; else
-				pth = _realloc(pth, new_len + 1)
-				pth{new_len} = 0
-			;/
-
-			/; loop (int j = 0; j < new_len - old_len) [j++]
-				pth{old_len + j} = w_ptr{j}
 			;/
 		;/
@@ -140,6 +139,12 @@ struct Path {
 		return out
 	;/
 
+	/; print_all
+		/; loop (int i = 0; i < self.path_count) [i++]
+			_printf(self.split_path{i})
+		;/
+	;/
+
 	/; _del
 		/; loop (int i = 0; i < self.path_count) [i++]
 			_delete(self.split_path{i})
@@ -149,10 +154,30 @@ struct Path {
 	;/
 ;/
 
-#
-#
-#
+########################################
+# Compiler functions - here be dragons #
+########################################
+
+/; compile_file (Path in) [CompData]
+	CompData out
+	out.start()
 
-/; compile (Path in, out)
+	return out
+;/
+
+/; compile (Path in, out)
+	~void fin = in.open_read()
+	Vector v = tokenize_file(fin)
+	_close_file(fin)
+
+	~void fout = out.open_write()
+	/; loop (int i = 0; i < v.num_el) [i++]
+		~Token t_ptr = v.get(i)
+		print_token(t_ptr`, fout)
+		t_ptr`._del()
+	;/
+	v._del()
+	_close_file(fout)
 ;/
\ No newline at end of file
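The rewritten path-joining loop above grows the buffer once per component, copies the component in, and appends a '/' separator only between components rather than after the last one. For readers unfamiliar with TNSL syntax, here is a rough C sketch of the same logic; join_path and its parameters are illustrative names, not part of the tnslc source:

#include <stdio.h>
#include <stdlib.h>
#include <string.h>

/* Illustrative C version of the reworked joining loop: grow the buffer
 * for each component, copy the component in, then append a '/' only
 * between components, never after the last one. */
static char *join_path(char **parts, int count) {
    char *pth = malloc(1);
    pth[0] = '\0';

    for (int i = 0; i < count; i++) {
        size_t old_len  = strlen(pth);
        size_t part_len = strlen(parts[i]);
        size_t new_len  = old_len + part_len;

        pth = realloc(pth, new_len + 1);
        memcpy(pth + old_len, parts[i], part_len);
        pth[new_len] = '\0';

        if (i < count - 1) {                  /* separator between components */
            pth = realloc(pth, new_len + 2);
            pth[new_len] = '/';
            pth[new_len + 1] = '\0';
        }
    }
    return pth;
}

int main(void) {
    char *parts[] = {"tnslc", "compiler.tnsl"};
    char *joined = join_path(parts, 2);
    printf("%s\n", joined);   /* prints: tnslc/compiler.tnsl */
    free(joined);
    return 0;
}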
diff --git a/tnslc/tnslc_wrapped.tnsl b/tnslc/tnslc_wrapped.tnsl
index 1e37d94..e82581a 100644
--- a/tnslc/tnslc_wrapped.tnsl
+++ b/tnslc/tnslc_wrapped.tnsl
@@ -2,7 +2,7 @@
 :include "logging.tnsl"
 :include "vector.tnsl"
 :include "utils.tnsl"
-# :include "tokenizer.tnsl"
+:include "tokenizer.tnsl"
 :include "compiler.tnsl"
 
 {}uint8 m_usage = "Usage: tnslc [file to compile] [output asm]\n\0"
diff --git a/tnslc/tokenizer.tnsl b/tnslc/tokenizer.tnsl
index 08c0b52..62acd4a 100644
--- a/tnslc/tokenizer.tnsl
+++ b/tnslc/tokenizer.tnsl
@@ -243,7 +243,7 @@ struct Token {
 
 {}uint8 w_tkn_gen = "%d Tokens generated from file.\n\0"
 
-/; tokenize_file (~void file_in, file_out) [Vector]
+/; tokenize_file (~void file_in) [Vector]
 	# This vector is going to store all of our tokens as we generate them
 	Vector out_vect
 	# The size of a token struct is 3 uint + pointer = 4*8 = 32 bytes
diff --git a/tnslc/vector.tnsl b/tnslc/vector.tnsl
index 87a49e9..73580e0 100644
--- a/tnslc/vector.tnsl
+++ b/tnslc/vector.tnsl
@@ -71,6 +71,13 @@ struct Vector {
 		;/
 	;/
 
+	# [UNSAFE] ONLY USE FOR STRING VECTORS
+	/; add_str (~uint8 dat)
+		/; loop (int i = 0; dat{i} !== 0)
+			self.push(dat + i)
+		;/
+	;/
+
 	/; start (int el_size)
 		self.num_el = 0
 		self.el_size = el_size
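The new Vector.add_str helper in vector.tnsl walks a NUL-terminated string and pushes one byte per element, which is why it is flagged as safe only for string (single-byte element) vectors. A rough C sketch of that behaviour, with ByteVec, vec_push, and vec_add_str as illustrative stand-ins rather than the actual tnslc API:

#include <stdio.h>
#include <stdlib.h>

/* Byte vector whose push copies one element (one byte) at a time;
 * vec_add_str mirrors add_str: walk the string and push each byte. */
typedef struct {
    unsigned char *data;
    int num_el;
    int cap;
} ByteVec;

static void vec_push(ByteVec *v, const unsigned char *el) {
    if (v->num_el == v->cap) {
        v->cap = v->cap ? v->cap * 2 : 8;
        v->data = realloc(v->data, (size_t)v->cap);
    }
    v->data[v->num_el++] = *el;    /* element size is one byte */
}

static void vec_add_str(ByteVec *v, const unsigned char *dat) {
    for (int i = 0; dat[i] != 0; i++)
        vec_push(v, dat + i);      /* push a pointer to each byte, as add_str does */
}

int main(void) {
    ByteVec v = {0};
    vec_add_str(&v, (const unsigned char *)"tnslc");
    printf("%d bytes pushed\n", v.num_el);   /* prints: 5 bytes pushed */
    free(v.data);
    return 0;
}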