#include <stdlib.h>
#include <errno.h>
-static lex_tok_t* lexer_make_token(char* text);
+static lex_tok_t* lexer_make_token(size_t line, size_t col, char* text);
static lex_tok_t* lexer_punc(char* text);
static lex_tok_t* lexer_char(char* text);
static lex_tok_t* lexer_radix_int(char* text);
lex_tok_t* lexer_read(lexer_t* p_lexer) {
lex_tok_t* p_tok = NULL;
- char* text = scanner_read(p_lexer->scanner);
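+ /* Location of the token, filled in by scanner_read() */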
+ size_t line;
+ size_t col;
+ char* text = scanner_read(p_lexer->scanner, &line, &col);
if (NULL != text) {
- p_tok = lexer_make_token(text);
+ p_tok = lexer_make_token(line, col, text);
free(text);
}
return p_tok;
}
-static lex_tok_t* lexer_make_token(char* text) {
+static lex_tok_t* lexer_make_token(size_t line, size_t col, char* text) {
lex_tok_t* p_tok = NULL;
if (0 == strcmp(text,"end")) {
p_tok = lex_tok_new(T_END, NULL);
} else {
p_tok = lexer_var(text);
}
+ /* If we found a valid token then fill in the location details */
+ if (NULL != p_tok) {
+ p_tok->line = line;
+ p_tok->col = col;
+ }
return p_tok;
}
lex_tok_type_t type;
const char* file;
size_t line;
- size_t column;
+ size_t col;
void* value;
} lex_tok_t;
lexer_t* p_lexer = lexer_new(NULL, input);
lex_tok_t* token;
while(NULL != (token = lexer_read(p_lexer))) {
- pprint_token(output, token);
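+ /* Print each token together with its source location */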
+ pprint_token(output, token, true);
mem_release(token);
}
mem_release(p_lexer);
}
}
-void pprint_token(FILE* file, lex_tok_t* token)
+void pprint_token(FILE* file, lex_tok_t* token, bool print_loc)
{
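+ /* Optionally prefix the token with its source location as "line:col:" */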
+ if (print_loc) {
+ fprintf(file, "%zu:", token->line);
+ fprintf(file, "%zu:", token->col);
+ }
pprint_token_type(file, token);
if (token->type < T_LBRACE) {
fprintf(file, ":");
{
print_indent(file, depth);
if (tree->tag == ATOM) {
- pprint_token(file, tree->ptr.tok);
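+ /* Locations are omitted when printing tokens inside a tree */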
+ pprint_token(file, tree->ptr.tok, false);
} else {
fputs("(tree", file);
vec_t* p_vec = tree->ptr.vec;
void pprint_token_value(FILE* file, lex_tok_t* token);
-void pprint_token(FILE* file, lex_tok_t* token);
+void pprint_token(FILE* file, lex_tok_t* token, bool print_loc);
void pprint_tree(FILE* file, tree_t* tree, int depth);
scanner_t* p_scanner = (scanner_t*)mem_allocate(sizeof(scanner_t), &scanner_free);
p_scanner->p_line = NULL;
p_scanner->index = 0;
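+ /* Line counter for token locations; bumped each time a new line is buffered */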
+ p_scanner->line = 0;
p_scanner->p_input = p_file;
p_scanner->p_prompt = p_prompt;
return p_scanner;
}
-char* scanner_read(scanner_t* p_scanner) {
+char* scanner_read(scanner_t* p_scanner, size_t* line, size_t* column) {
char* p_tok = NULL;
scanner_skip_ws(p_scanner);
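+ /* Report the token's starting location; the column is 1-based within the buffered line */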
+ *line = p_scanner->line;
+ *column = p_scanner->index+1;
if (!scanner_eof(p_scanner)) {
if (scanner_oneof(p_scanner, "()[]{};,'")) {
p_tok = scanner_dup(p_scanner, p_scanner->index, 1);
p_scanner->p_line[index++] = (c == EOF) ? '\0' : c;
p_scanner->p_line[index++] = '\0';
p_scanner->index = 0;
+ /* Increment line count */
+ p_scanner->line++;
}
}
typedef struct {
char* p_line;
size_t index;
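+ /* Count of lines buffered so far, used for token locations */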
+ size_t line;
FILE* p_input;
char* p_prompt;
} scanner_t;
scanner_t* scanner_new(char* p_prompt, FILE* p_file);
-char* scanner_read(scanner_t* p_scanner);
+char* scanner_read(scanner_t* p_scanner, size_t* line, size_t* col);
bool scanner_eof(scanner_t* p_scanner);
end
def lexer(input)
- cli(['--tokens'], input).scan(/^(T_[A-Z]+(:("[^"]*"|[^\n]+))?)/m).map {|m| m[0] }
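+ # Each token line is now prefixed with "line:col:", which the pattern skips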
+ cli(['--tokens'], input).scan(/^\d+:\d+:(T_[A-Z]+(:("[^"]*"|[^\n]+))?)/m).map {|m| m[0] }
end
def re_structure( token_array, offset = 0 )