upgrade to the new lexer complete

parent 7ec039edfd
commit c9af44f0e7
@@ -378,8 +378,8 @@ default_keys(Bind_Helper *context){
    bind(context, 's', MDFR_CTRL, cmdid_save);
    bind(context, 'u', MDFR_CTRL, to_uppercase);
    bind(context, 'U', MDFR_CTRL, rewrite_as_single_caps);
    bind(context, 'v', MDFR_CTRL, paste);
    bind(context, 'V', MDFR_CTRL, paste_next);
    bind(context, 'v', MDFR_CTRL, paste_and_indent);
    bind(context, 'V', MDFR_CTRL, paste_next_and_indent);
    bind(context, 'x', MDFR_CTRL, cut);
    bind(context, 'y', MDFR_CTRL, cmdid_redo);
    bind(context, 'z', MDFR_CTRL, cmdid_undo);

@@ -15,6 +15,10 @@
# define DEFAULT_INDENT_FLAGS 0
#endif

#ifndef DEF_TAB_WIDTH
# define DEF_TAB_WIDTH 4
#endif


//
// Memory

@@ -694,6 +698,205 @@ CUSTOM_COMMAND_SIG(move_right){
        true);
}

//
// Auto Indenting and Whitespace
//

static int
seek_line_end(Application_Links *app, Buffer_Summary *buffer, int pos){
    char chunk[1024];
    int chunk_size = sizeof(chunk);
    Stream_Chunk stream = {0};

    int still_looping;
    char at_pos;

    if (init_stream_chunk(&stream, app, buffer, pos, chunk, chunk_size)){
        still_looping = 1;
        do{
            for (; pos < stream.end; ++pos){
                at_pos = stream.data[pos];
                if (at_pos == '\n'){
                    goto double_break;
                }
            }
            still_looping = forward_stream_chunk(&stream);
        }while(still_looping);
        double_break:;

        if (pos > buffer->size){
            pos = buffer->size;
        }
    }

    return(pos);
}

static int
seek_line_beginning(Application_Links *app, Buffer_Summary *buffer, int pos){
    char chunk[1024];
    int chunk_size = sizeof(chunk);
    Stream_Chunk stream = {0};

    int still_looping;
    char at_pos;

    --pos;
    if (init_stream_chunk(&stream, app, buffer, pos, chunk, chunk_size)){
        still_looping = 1;
        do{
            for (; pos >= stream.start; --pos){
                at_pos = stream.data[pos];
                if (at_pos == '\n'){
                    goto double_break;
                }
            }
            still_looping = backward_stream_chunk(&stream);
        }while(still_looping);
        double_break:;

        if (pos != 0){
            ++pos;
        }
        if (pos < 0){
            pos = 0;
        }
    }

    return(pos);
}

static void
move_past_lead_whitespace(Application_Links *app, View_Summary *view, Buffer_Summary *buffer){
    refresh_view(app, view);

    int new_pos = seek_line_beginning(app, buffer, view->cursor.pos);
    char space[1024];
    Stream_Chunk chunk = {0};
    int still_looping = false;

    int i = new_pos;
    if (init_stream_chunk(&chunk, app, buffer, i, space, sizeof(space))){
        do{
            for (; i < chunk.end; ++i){
                char at_pos = chunk.data[i];
                if (at_pos == '\n' || !char_is_whitespace(at_pos)){
                    goto break2;
                }
            }
            still_looping = forward_stream_chunk(&chunk);
        }while(still_looping);
        break2:;

        if (i > view->cursor.pos){
            app->view_set_cursor(app, view, seek_pos(i), true);
        }
    }
}

CUSTOM_COMMAND_SIG(auto_tab_line_at_cursor){
    unsigned int access = AccessOpen;
    View_Summary view = app->get_active_view(app, access);
    Buffer_Summary buffer = app->get_buffer(app, view.buffer_id, access);

    app->buffer_auto_indent(app, &buffer,
                            view.cursor.pos, view.cursor.pos,
                            DEF_TAB_WIDTH,
                            DEFAULT_INDENT_FLAGS);
    move_past_lead_whitespace(app, &view, &buffer);
}

CUSTOM_COMMAND_SIG(auto_tab_whole_file){
    unsigned int access = AccessOpen;
    View_Summary view = app->get_active_view(app, access);
    Buffer_Summary buffer = app->get_buffer(app, view.buffer_id, access);

    app->buffer_auto_indent(app, &buffer,
                            0, buffer.size,
                            DEF_TAB_WIDTH,
                            DEFAULT_INDENT_FLAGS);
}

CUSTOM_COMMAND_SIG(auto_tab_range){
    unsigned int access = AccessOpen;
    View_Summary view = app->get_active_view(app, access);
    Buffer_Summary buffer = app->get_buffer(app, view.buffer_id, access);
    Range range = get_range(&view);

    app->buffer_auto_indent(app, &buffer,
                            range.min, range.max,
                            DEF_TAB_WIDTH,
                            DEFAULT_INDENT_FLAGS);
    move_past_lead_whitespace(app, &view, &buffer);
}

CUSTOM_COMMAND_SIG(write_and_auto_tab){
    exec_command(app, write_character);
    exec_command(app, auto_tab_line_at_cursor);
}

CUSTOM_COMMAND_SIG(clean_all_lines){
    // TODO(allen): This command always iterates accross the entire
    // buffer, so streaming it is actually the wrong call. Rewrite this
    // to minimize calls to app->buffer_read_range.
    View_Summary view = app->get_active_view(app, AccessOpen);
    Buffer_Summary buffer = app->get_buffer(app, view.buffer_id, AccessOpen);

    int line_count = buffer.line_count;
    int edit_max = line_count;

    if (edit_max*sizeof(Buffer_Edit) < app->memory_size){
        Buffer_Edit *edits = (Buffer_Edit*)app->memory;

        char data[1024];
        Stream_Chunk chunk = {0};

        int i = 0;
        if (init_stream_chunk(&chunk, app, &buffer,
                              i, data, sizeof(data))){
            Buffer_Edit *edit = edits;

            int buffer_size = buffer.size;
            int still_looping = true;
            int last_hard = buffer_size;
            do{
                for (; i < chunk.end; ++i){
                    char at_pos = chunk.data[i];
                    if (at_pos == '\n'){
                        if (last_hard+1 < i){
                            edit->str_start = 0;
                            edit->len = 0;
                            edit->start = last_hard+1;
                            edit->end = i;
                            ++edit;
                        }
                        last_hard = buffer_size;
                    }
                    else if (char_is_whitespace(at_pos)){
                        // NOTE(allen): do nothing
                    }
                    else{
                        last_hard = i;
                    }
                }

                still_looping = forward_stream_chunk(&chunk);
            }while(still_looping);

            if (last_hard+1 < buffer_size){
                edit->str_start = 0;
                edit->len = 0;
                edit->start = last_hard+1;
                edit->end = buffer_size;
                ++edit;
            }

            int edit_count = (int)(edit - edits);
            app->buffer_batch_edit(app, &buffer, 0, 0, edits, edit_count, BatchEdit_PreserveTokens);
        }
    }
}

//
// Clipboard
//

@@ -843,6 +1046,16 @@ CUSTOM_COMMAND_SIG(paste_next){
    }
}

CUSTOM_COMMAND_SIG(paste_and_indent){
    exec_command(app, paste);
    exec_command(app, auto_tab_range);
}

CUSTOM_COMMAND_SIG(paste_next_and_indent){
    exec_command(app, paste_next);
    exec_command(app, auto_tab_range);
}

//
// Fancy Editing
//

@@ -1032,70 +1245,6 @@ CUSTOM_COMMAND_SIG(seek_whitespace_down){
        true);
}

static int
seek_line_end(Application_Links *app, Buffer_Summary *buffer, int pos){
    char chunk[1024];
    int chunk_size = sizeof(chunk);
    Stream_Chunk stream = {0};

    int still_looping;
    char at_pos;

    if (init_stream_chunk(&stream, app, buffer, pos, chunk, chunk_size)){
        still_looping = 1;
        do{
            for (; pos < stream.end; ++pos){
                at_pos = stream.data[pos];
                if (at_pos == '\n'){
                    goto double_break;
                }
            }
            still_looping = forward_stream_chunk(&stream);
        }while(still_looping);
        double_break:;

        if (pos > buffer->size){
            pos = buffer->size;
        }
    }

    return(pos);
}

static int
seek_line_beginning(Application_Links *app, Buffer_Summary *buffer, int pos){
    char chunk[1024];
    int chunk_size = sizeof(chunk);
    Stream_Chunk stream = {0};

    int still_looping;
    char at_pos;

    --pos;
    if (init_stream_chunk(&stream, app, buffer, pos, chunk, chunk_size)){
        still_looping = 1;
        do{
            for (; pos >= stream.start; --pos){
                at_pos = stream.data[pos];
                if (at_pos == '\n'){
                    goto double_break;
                }
            }
            still_looping = backward_stream_chunk(&stream);
        }while(still_looping);
        double_break:;

        if (pos != 0){
            ++pos;
        }
        if (pos < 0){
            pos = 0;
        }
    }

    return(pos);
}

CUSTOM_COMMAND_SIG(seek_end_of_line){
    unsigned int access = AccessProtected;
    View_Summary view = app->get_active_view(app, access);

@@ -1163,38 +1312,6 @@ CUSTOM_COMMAND_SIG(write_increment){
    write_string(app, make_lit_string("++"));
}

#ifndef DEF_TAB_WIDTH
# define DEF_TAB_WIDTH 4
#endif

static void
move_past_lead_whitespace(Application_Links *app, View_Summary *view, Buffer_Summary *buffer){
    refresh_view(app, view);

    int new_pos = seek_line_beginning(app, buffer, view->cursor.pos);
    char space[1024];
    Stream_Chunk chunk = {0};
    int still_looping = false;

    int i = new_pos;
    if (init_stream_chunk(&chunk, app, buffer, i, space, sizeof(space))){
        do{
            for (; i < chunk.end; ++i){
                char at_pos = chunk.data[i];
                if (at_pos == '\n' || !char_is_whitespace(at_pos)){
                    goto break2;
                }
            }
            still_looping = forward_stream_chunk(&chunk);
        }while(still_looping);
        break2:;

        if (i > view->cursor.pos){
            app->view_set_cursor(app, view, seek_pos(i), true);
        }
    }
}

static void
long_braces(Application_Links *app, char *text, int size){
    unsigned int access = AccessOpen;

@@ -1873,113 +1990,6 @@ CUSTOM_COMMAND_SIG(execute_previous_cli){
    }
}

//
// Auto Indenting and Whitespace
//

CUSTOM_COMMAND_SIG(auto_tab_line_at_cursor){
    unsigned int access = AccessOpen;
    View_Summary view = app->get_active_view(app, access);
    Buffer_Summary buffer = app->get_buffer(app, view.buffer_id, access);

    app->buffer_auto_indent(app, &buffer,
                            view.cursor.pos, view.cursor.pos,
                            DEF_TAB_WIDTH,
                            DEFAULT_INDENT_FLAGS);
    move_past_lead_whitespace(app, &view, &buffer);
}

CUSTOM_COMMAND_SIG(auto_tab_whole_file){
    unsigned int access = AccessOpen;
    View_Summary view = app->get_active_view(app, access);
    Buffer_Summary buffer = app->get_buffer(app, view.buffer_id, access);

    app->buffer_auto_indent(app, &buffer,
                            0, buffer.size,
                            DEF_TAB_WIDTH,
                            DEFAULT_INDENT_FLAGS);
}

CUSTOM_COMMAND_SIG(auto_tab_range){
    unsigned int access = AccessOpen;
    View_Summary view = app->get_active_view(app, access);
    Buffer_Summary buffer = app->get_buffer(app, view.buffer_id, access);
    Range range = get_range(&view);

    app->buffer_auto_indent(app, &buffer,
                            range.min, range.max,
                            DEF_TAB_WIDTH,
                            DEFAULT_INDENT_FLAGS);
    move_past_lead_whitespace(app, &view, &buffer);
}

CUSTOM_COMMAND_SIG(write_and_auto_tab){
    exec_command(app, write_character);
    exec_command(app, auto_tab_line_at_cursor);
}

CUSTOM_COMMAND_SIG(clean_all_lines){
    // TODO(allen): This command always iterates accross the entire
    // buffer, so streaming it is actually the wrong call. Rewrite this
    // to minimize calls to app->buffer_read_range.
    View_Summary view = app->get_active_view(app, AccessOpen);
    Buffer_Summary buffer = app->get_buffer(app, view.buffer_id, AccessOpen);

    int line_count = buffer.line_count;
    int edit_max = line_count;

    if (edit_max*sizeof(Buffer_Edit) < app->memory_size){
        Buffer_Edit *edits = (Buffer_Edit*)app->memory;

        char data[1024];
        Stream_Chunk chunk = {0};

        int i = 0;
        if (init_stream_chunk(&chunk, app, &buffer,
                              i, data, sizeof(data))){
            Buffer_Edit *edit = edits;

            int buffer_size = buffer.size;
            int still_looping = true;
            int last_hard = buffer_size;
            do{
                for (; i < chunk.end; ++i){
                    char at_pos = chunk.data[i];
                    if (at_pos == '\n'){
                        if (last_hard+1 < i){
                            edit->str_start = 0;
                            edit->len = 0;
                            edit->start = last_hard+1;
                            edit->end = i;
                            ++edit;
                        }
                        last_hard = buffer_size;
                    }
                    else if (char_is_whitespace(at_pos)){
                        // NOTE(allen): do nothing
                    }
                    else{
                        last_hard = i;
                    }
                }

                still_looping = forward_stream_chunk(&chunk);
            }while(still_looping);

            if (last_hard+1 < buffer_size){
                edit->str_start = 0;
                edit->len = 0;
                edit->start = last_hard+1;
                edit->end = buffer_size;
                ++edit;
            }

            int edit_count = (int)(edit - edits);
            app->buffer_batch_edit(app, &buffer, 0, 0, edits, edit_count, BatchEdit_PreserveTokens);
        }
    }
}

//
// Default Building Stuff
//

@@ -14,15 +14,6 @@ jump_to_location(Application_Links *app, View_Summary *view, Jump_Location *l){
    app->view_set_cursor(app, view, seek_line_char(l->line, l->column), true);
}

static int
gcc_style_verify(String line, int colon_pos){
    int result = false;
    if (colon_pos < line.size){
        result = true;
    }
    return(result);
}

static int
ms_style_verify(String line, int paren_pos){
    int result = false;

@@ -43,13 +34,16 @@ parse_error(String line, Jump_Location *location,
            int skip_sub_errors, int *colon_char){
    int result = false;

    String original_line = line;
    line = skip_chop_whitespace(line);

    int colon_pos = find(line, 0, ')');
    if (ms_style_verify(line, colon_pos)){
        colon_pos = find(line, colon_pos, ':');
        if (colon_pos < line.size){
            String location_str = substr(line, 0, colon_pos);

            if (!(skip_sub_errors && line.str[0] == ' ')){
            if (!(skip_sub_errors && original_line.str[0] == ' ')){
                location_str = skip_chop_whitespace(location_str);

                int paren_pos = find(location_str, 0, '(');

@@ -92,18 +86,16 @@ parse_error(String line, Jump_Location *location,

    else{
        int colon_pos1 = find(line, 0, ':');
        if (colon_pos1 == 1){
            if (line.size > colon_pos1+1){
                if (char_is_slash(line.str[colon_pos1+1])){
                    colon_pos1 = find(line, colon_pos1+1, ':');
                }
            }
        }

        int colon_pos2 = find(line, colon_pos1+1, ':');
        int colon_pos3 = find(line, colon_pos2+1, ':');

        if (gcc_style_verify(line, colon_pos3)){
        if (colon_pos3 < line.size){
            String filename = substr(line, 0, colon_pos1);
            String line_number = substr(line, colon_pos1+1, colon_pos2 - colon_pos1 - 1);
            String column_number = substr(line, colon_pos2+1, colon_pos3 - colon_pos2 - 1);

@@ -119,8 +111,14 @@ parse_error(String line, Jump_Location *location,
            }
        }
        else{
            int colon_pos1 = find(line, 0, ':');
            int colon_pos2 = find(line, colon_pos1+1, ':');
            colon_pos1 = find(line, 0, ':');
            if (line.size > colon_pos1+1){
                if (char_is_slash(line.str[colon_pos1+1])){
                    colon_pos1 = find(line, colon_pos1+1, ':');
                }
            }

            colon_pos2 = find(line, colon_pos1+1, ':');

            if (colon_pos2 < line.size){
                String filename = substr(line, 0, colon_pos1);

@@ -869,11 +869,11 @@ cpp_read_block_comment(char *data, int size, int pos){
        if (data[pos] == '*' &&
            pos + 1 < size &&
            data[pos+1] == '/'){
            pos += 2;
            break;
        }
        ++pos;
    }
    pos += 2;
    result.token.size = pos - result.token.start;
    result.token.type = CPP_TOKEN_COMMENT;
    result.pos = pos;

@@ -27,9 +27,14 @@

#include "4coder_table.cpp"

#define USE_NEW_LEXER 1

#if USE_NEW_LEXER
#include "test/4cpp_new_lexer.h"
#else
#define FCPP_LEXER_IMPLEMENTATION
//#include "test/4cpp_new_lexer.h"
#include "4cpp_lexer.h"
#endif

#include "4ed_template.cpp"

@@ -1183,7 +1183,7 @@ Job_Callback_Sig(job_full_lex){
    i32 buffer_size = file->state.buffer.size;
    buffer_size = (buffer_size + 3)&(~3);

#if 0
#if USE_NEW_LEXER
    while (memory->size < buffer_size*2){
        system->grow_thread_memory(memory);
    }

@@ -1202,7 +1202,7 @@ Job_Callback_Sig(job_full_lex){
    do{
        i32 result =
            cpp_lex_size_nonalloc(&lex,
                                  cpp_file.data, cpp_file.size, cpp_file.size,
                                  text_data, text_size, text_size,
                                  &tokens, 2048);

        switch (result){

@@ -1376,9 +1376,12 @@ file_relex_parallel(System_Functions *system,
        relex_space.max_count = state.space_request;
        relex_space.tokens = push_array(part, Cpp_Token, relex_space.max_count);

        // char *spare = push_array(part, char, cpp_file.size);
        // if (cpp_relex_nonalloc_main(&state, &relex_space, &relex_end, spare)){
#if USE_NEW_LEXER
        char *spare = push_array(part, char, size);
        if (cpp_relex_nonalloc_main(&state, &relex_space, &relex_end, spare)){
#else
        if (cpp_relex_nonalloc_main(&state, &relex_space, &relex_end)){
#endif
            inline_lex = 0;
        }
        else{

@@ -1,6 +1,6 @@
@echo off

REM "build_exp.bat" /Zi
REM "build_exp.bat" /O2
"build_all.bat" /DFRED_SUPER /DFRED_INTERNAL /Zi
REM "build_all.bat" /DFRED_INTERNAL /Zi
REM "build_all.bat" /O2 /Zi

@@ -220,7 +220,6 @@ cpp_shift_token_starts(Cpp_Token_Stack *stack, int from_token_i, int shift_amoun
enum Pos_Update_Rule{
    PUR_none,
    PUR_back_one,
    PUR_unget_whitespace,
};

lexer_link Lex_PP_State

@@ -385,6 +384,7 @@ cpp_lex_nonalloc(Lex_Data *S_ptr,
        DrCase(4);
        DrCase(5);
        DrCase(6);
        DrCase(7);
    }

    for (;;){

@@ -410,6 +410,8 @@ cpp_lex_nonalloc(Lex_Data *S_ptr,
            S.pp_state -= LSPP_count;
        }

        S.token.state_flags = S.pp_state;

        S.token_start = S.pos;
        S.tb_pos = 0;
        S.fsm = zero_lex_fsm();

@@ -438,7 +440,10 @@ cpp_lex_nonalloc(Lex_Data *S_ptr,

        Assert(S.fsm.emit_token == 1);

        if (c != 0){
        if (c == 0){
            S.completed = 1;
        }

        if (S.fsm.state >= LS_count) S.fsm.state -= LS_count;
        pos_update_rule = PUR_none;
        if (S.pp_state == LSPP_include){

@@ -461,6 +466,8 @@ cpp_lex_nonalloc(Lex_Data *S_ptr,
        switch (S.fsm.state){
            case LS_default:
            switch (c){
                case 0: S.fsm.emit_token = 0; break;

#define OperCase(op,t) case op: S.token.type = t; break;
                OperCase('{', CPP_TOKEN_BRACE_OPEN);
                OperCase('}', CPP_TOKEN_BRACE_CLOSE);

@@ -666,17 +673,21 @@ cpp_lex_nonalloc(Lex_Data *S_ptr,

            case LS_comment:
            case LS_comment_slashed:
            S.token.type = CPP_TOKEN_COMMENT;
            S.token.flags = 0;
            pos_update_rule = PUR_back_one;
            break;

            case LS_comment_block:
            case LS_comment_block_ending:
            S.token.type = CPP_TOKEN_COMMENT;
            S.token.flags = 0;
            pos_update_rule = PUR_unget_whitespace;
            break;

            case LS_error_message:
            S.token.type = CPP_TOKEN_ERROR_MESSAGE;
            S.token.flags = 0;
            pos_update_rule = PUR_unget_whitespace;
            pos_update_rule = PUR_back_one;
            break;

            case LS_dot:

@@ -879,12 +890,10 @@ cpp_lex_nonalloc(Lex_Data *S_ptr,
            --S.pos;
            break;

            case PUR_unget_whitespace:
            c = chunk[--S.pos];
            while (c == ' ' || c == '\n' || c == '\t' || c == '\r' || c == '\v' || c == '\f'){
                c = chunk[--S.pos];
            default:
            if (chunk[S.pos-1] == 0){
                --S.pos;
            }
            ++S.pos;
            break;
        }


@@ -943,17 +952,18 @@ cpp_lex_nonalloc(Lex_Data *S_ptr,
            if ((S.token.flags & CPP_TFLAG_PP_DIRECTIVE) == 0){
                S.token.flags |= (S.pp_state != LSPP_default)?(CPP_TFLAG_PP_BODY):(0);
            }
            S.token.state_flags = S.pp_state;

            token_i = cpp_place_token_nonalloc(out_tokens, token_i, S.token);
            if (token_i == max_token_i){
                if (S.pos == end_pos){
                    S.chunk_pos += size;
                    DrYield(7, LexNeedChunk);
                }
                DrYield(2, LexNeedTokenMemory);
            }
        }
    }
    // NOTE(allen): else case for "if (c != 0) {...}
    else{
        S.completed = 1;

        if (S.completed){
            break;
        }
    }

@@ -206,6 +206,19 @@ end_t(Times *t){
    *t = time;
}

static int
equivalent_comments(Cpp_Token *a, Cpp_Token *b, char *data){
    String s_a = make_string(data + a->start, a->size);
    String s_b = make_string(data + b->start, b->size);

    s_a = skip_chop_whitespace(s_a);
    s_b = skip_chop_whitespace(s_b);

    int result = match(s_a, s_b);

    return(result);
}

static void
run_experiment(Experiment *exp, char *filename, int verbose,
               int chunks, int max_tokens){

@@ -266,16 +279,17 @@ run_experiment(Experiment *exp, char *filename, int verbose,
        }
        else{
            start = __rdtsc();
            new_lex::cpp_lex_nonalloc(&ld,
                                      (char*)file_data.data, file_data.size,
                                      &exp->testing_stack);
            new_lex::cpp_lex_size_nonalloc(
                &ld, (char*)file_data.data, file_data.size,
                file_data.size, &exp->testing_stack);

            time.fsm += (__rdtsc() - start);
        }
    }
    else{
        if (chunks){
            start = __rdtsc();
            int relevant_size = file_data.size + 1;
            int relevant_size = file_data.size;
            is_last = 0;
            for (k = 0; k < relevant_size; k += chunks){
                chunk_size = chunks;

@@ -289,7 +303,8 @@ run_experiment(Experiment *exp, char *filename, int verbose,
                do{
                    result =
                        new_lex::cpp_lex_size_nonalloc(&ld,
                                                       (char*)file_data.data + k, chunk_size, file_data.size,
                                                       (char*)file_data.data + k, chunk_size,
                                                       file_data.size,
                                                       &exp->testing_stack,
                                                       max_tokens);
                    if (result == new_lex::LexFinished ||

@@ -348,6 +363,17 @@ run_experiment(Experiment *exp, char *filename, int verbose,
            }

            if (correct->start != testing->start || correct->size != testing->size){

                int mismatch = 1;
                if (correct->type == testing->type &&
                    (correct->type == CPP_TOKEN_COMMENT ||
                     correct->type == CPP_TOKEN_ERROR_MESSAGE)){
                    if (equivalent_comments(correct, testing, data)){
                        mismatch = 0;
                    }
                }

                if (mismatch){
                    pass = 0;
                    if (verbose >= 1){
                        printf("token range mismatch at token %d\n"

@@ -359,6 +385,7 @@ run_experiment(Experiment *exp, char *filename, int verbose,
                               testing->size, data + testing->start);
                    }
                }
            }

            if (correct->flags != testing->flags){
                pass = 0;

@@ -402,12 +429,12 @@ show_time(Times t, int repeats, char *type){

int main(){
    int repeats = 1;
    int verbose_level = 1;
    int chunk_start = 64;
    int chunk_end = 64;
#define TEST_FILE "lexer_test2.cpp"
#define SINGLE_ITEM 1
    int token_limit = 1;
    int verbose_level = -1;
    int chunk_start = 0;
    int chunk_end = 16;
#define TEST_FILE "parser_test_gcc.cpp"
#define SINGLE_ITEM 0
    int token_limit = 0;

    int chunks = (chunk_start > 0 && chunk_start <= chunk_end);
    int c = 0;