mirror of https://github.com/colgm/colgm.git
🎨 update
parent d7fe5f3631
commit f15ff19517
@@ -23,6 +23,7 @@ pub enum ast_kind {
    ast_init_pair,
    ast_initializer,
    ast_call_path,
    ast_call_id,
    ast_call,
    ast_assignment,
    ast_type_def,
@@ -22,6 +22,8 @@ impl file_lines {
    }

    pub func delete(self) {
        io::stderr().out("[").green().out("report").reset()
            .out("] delete file_lines::source\n");
        for (var index = 0; index < self->size; index += 1) {
            self->source[index].delete();
        }
@@ -90,14 +92,8 @@ impl report {
        self->filename.clear();
        self->filename.append_i8_vec(filename);

        var another = str::new();
        io::stdout().out("read into report::another\n");
        readfile_into_string(filename, another);
        io::stdout().out("\n");
        var source = str::new();
        io::stdout().out("read into report::source\n");
        readfile_into_string(filename, source);
        io::stdout().out("\n");

        var tmp = str::new();
        for (var pos = 0 => u64; pos < source->size; pos += 1 => u64) {
@@ -890,15 +890,27 @@ impl lexer {
        }
    }

    func generate_eof_token(self) {
        var eof_span = span::new();
        eof_span->file.copy_const(self->filename.c_str);
        eof_span->begin_line = self->line;
        eof_span->begin_column = self->column;
        eof_span->end_line = self->line;
        eof_span->end_column = self->column;
        self->toks->append(
            tok_kind::tok_eof,
            str::new()->append_i8_vec("<eof>"),
            eof_span
        );
    }

    pub func scan(self, filename: i8*) {
        self->filename.clear();
        self->filename.copy_const(filename);
        self->err->load_file_source(filename);

        var src = str::new();
        io::stdout().out("read into lexer::scan::src\n");
        readfile_into_string(filename, src);
        io::stdout().out("\n");

        self->pos = 0 => u64;
        self->line = 0;
@@ -938,21 +950,11 @@ impl lexer {
            }
        }

        var eof_span = span::new();
        eof_span->file.copy_const(self->filename.c_str);
        eof_span->begin_line = self->line;
        eof_span->begin_column = self->column;
        eof_span->end_line = self->line;
        eof_span->end_column = self->column;
        self->toks->append(
            tok_kind::tok_eof,
            str::new()->append_i8_vec("<eof>"),
            eof_span
        );
        self->generate_eof_token();

        // lifetime end for src
        src->delete();
        free(src => i8*);
        io::stdout().out("delete lexer::scan::src\n");
    }

    pub func dump(self) {
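Taken together, the two lexer hunks above appear to move the inline EOF-token construction into the new generate_eof_token helper, so scan only calls the helper at the end. A rough sketch of the resulting tail of lexer::scan, reconstructed from the hunks above rather than copied verbatim from the repository:

    // sketch only: body elided, reconstructed from the hunks above
    pub func scan(self, filename: i8*) {
        // ... main tokenize loop ...

        // EOF token creation is now delegated to the helper added in this commit
        self->generate_eof_token();

        // lifetime end for src
        src->delete();
        free(src => i8*);
    }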
@@ -15,7 +15,8 @@ pub enum flag {

pub func readfile_into_string(filename: i8*, dst: str*) -> i64 {
    io::stdout().out("remained info size: ")
        .out_i64(dst->size => i64).out("\n");
        .out_i64(dst->size => i64)
        .out(" in 0x").out_hex(dst->c_str => i64).out("\n");
    var fd = open(filename, (flag::O_RDONLY => i32) | (flag::O_BINARY => i32));
    if (fd < (0 => i32)) {
        return fd => i64;
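For context, a minimal caller sketch of readfile_into_string based only on the signature and the str usage shown in the hunks above; the file name and the error handling are illustrative assumptions, not taken from this diff:

    // hypothetical caller sketch (file name and error message are assumptions)
    var buf = str::new();
    if (readfile_into_string("main.colgm", buf) < (0 => i64)) {
        io::stderr().out("failed to open file\n");
    }
    buf->delete();
    free(buf => i8*);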