wrote lexer function docs, expanded doc generator
This commit is contained in:
parent 161c3cc871
commit 8cf49b566e

4coder_API.html (411 changed lines)
File diff suppressed because one or more lines are too long

4cpp_lexer.h (488 changed lines)
@@ -12,8 +12,9 @@
 # define FCPP_LINK static
 #endif
 
+#define FCPP_INTERNAL FCPP_LINK
+
 #include "4cpp_lexer_types.h"
-#include "4cpp_lexer_fsms.h"
 #include "4cpp_lexer_tables.c"
 
 // TODO(allen): revisit this keyword data declaration system
@@ -145,12 +146,26 @@ static String_And_Flag keywords[] = {
 
 FCPP_LINK Cpp_Get_Token_Result
-cpp_get_token(Cpp_Token_Stack *token_stack, int32_t pos){
+cpp_get_token(Cpp_Token_Array *token_array_in, int32_t pos)/*
+DOC_PARAM(token_array, The array of tokens from which to get a token.)
+DOC_PARAM(pos, The position, measured in bytes, to get the token for.)
+DOC_RETURN(A Cpp_Get_Token_Result struct is returned containing the index
+of a token and a flag indicating whether the pos is contained in the token
+or in whitespace after the token.)
+
+DOC(This call performs a binary search over all of the tokens looking
+for the token that contains the specified position. If the position
+is in whitespace between the tokens, the returned token index is the
+index of the token immediately before the provided position. The returned
+index can be -1 if the position is before the first token.)
+
+DOC_SEE(Cpp_Get_Token_Result)
+*/{
     Cpp_Get_Token_Result result = {};
-    Cpp_Token *token_array = token_stack->tokens;
+    Cpp_Token *token_array = token_array_in->tokens;
     Cpp_Token *token = 0;
     int32_t first = 0;
-    int32_t count = token_stack->count;
+    int32_t count = token_array_in->count;
     int32_t last = count;
     int32_t this_start = 0, next_start = 0;
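
A minimal caller sketch for the behavior documented above, assuming an already-filled Cpp_Token_Array and a hypothetical byte position pos:

    Cpp_Get_Token_Result get = cpp_get_token(&token_array, pos);
    if (get.token_index == -1){
        // pos is before the first token in the file.
    }
    else{
        Cpp_Token token = token_array.tokens[get.token_index];
        if (get.in_whitespace){
            // pos falls in the whitespace after token, not inside it.
        }
    }
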
@@ -200,19 +215,23 @@ cpp_get_token(Cpp_Token_Stack *token_stack, int32_t pos){
     return(result);
 }
 
-FCPP_LINK void
-cpp_shift_token_starts(Cpp_Token_Stack *stack, int32_t from_token_i, int32_t shift_amount){
-    Cpp_Token *token = stack->tokens + from_token_i;
-    int32_t count = stack->count, i;
-    
-    for (i = from_token_i; i < count; ++i, ++token){
-        token->start += shift_amount;
-    }
-}
+FCPP_LINK Cpp_Lex_Data
+cpp_lex_data_init(char *mem_buffer)/*
+DOC_PARAM(tb, The memory to use for initializing the lex state's temp memory buffer.)
+DOC_RETURN(A brand new lex state ready to begin lexing a file from the beginning.)
+DOC(Creates a new lex state in the form of a Cpp_Lex_Data struct and returns the struct.
+The system needs a temporary buffer that is as long as the longest token. 4096 is usually
+enough but the buffer is not checked, so to be 100% bullet proof it has to be the same length
+as the file being lexed.)
+*/{
+    Cpp_Lex_Data data = {0};
+    data.tb = mem_buffer;
+    return(data);
+}
 
-FCPP_LINK Lex_PP_State
+FCPP_INTERNAL Cpp_Lex_PP_State
 cpp_pp_directive_to_state(Cpp_Token_Type type){
-    Lex_PP_State result = LSPP_default;
+    Cpp_Lex_PP_State result = LSPP_default;
     switch (type){
         case CPP_PP_INCLUDE:
         case CPP_PP_IMPORT:
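
A sketch of initializing the lex state as the doc above describes; the 4096-byte scratch buffer follows the "usually enough" guidance, and sizing it to the full file length is the bullet proof option:

    char *temp = (char*)malloc(4096); // must be at least as long as the longest token
    Cpp_Lex_Data lex_state = cpp_lex_data_init(temp);
    // ... lex with lex_state ...
    free(temp);
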
@@ -256,85 +275,27 @@ cpp_pp_directive_to_state(Cpp_Token_Type type){
     return(result);
 }
 
-FCPP_LINK int32_t
-cpp_place_token_nonalloc(Cpp_Token *out_tokens, int32_t token_i, Cpp_Token token){
-    Cpp_Token prev_token = {(Cpp_Token_Type)0};
-    
-    if (token_i > 0){
-        prev_token = out_tokens[token_i - 1];
-    }
-    
-    out_tokens[token_i++] = token;
-    
-    return(token_i);
-}
-
-FCPP_LINK bool
-cpp_push_token_nonalloc(Cpp_Token_Stack *out_tokens, Cpp_Token token){
-    bool result = 0;
-    if (out_tokens->count == out_tokens->max_count){
-        out_tokens->count =
-            cpp_place_token_nonalloc(out_tokens->tokens, out_tokens->count, token);
-        result = 1;
-    }
-    return(result);
-}
-
-struct Lex_Data{
-    char *tb;
-    int32_t tb_pos;
-    int32_t token_start;
-    
-    int32_t pos;
-    int32_t pos_overide;
-    int32_t chunk_pos;
-    
-    Lex_FSM fsm;
-    unsigned char white_done;
-    unsigned char pp_state;
-    unsigned char completed;
-    
-    Cpp_Token token;
-    
-    int32_t __pc__;
-};
-
-FCPP_LINK Lex_Data
-lex_data_init(char *tb){
-    Lex_Data data = {0};
-    data.tb = tb;
-    return(data);
-}
-
 #define DrCase(PC) case PC: goto resumespot_##PC
 
 #define DrYield(PC, n) { \
-    token_stack_out->count = token_i; \
+    token_array_out->count = token_i; \
     *S_ptr = S; S_ptr->__pc__ = PC; return(n); resumespot_##PC:; }
 
 #define DrReturn(n) { \
-    token_stack_out->count = token_i; \
+    token_array_out->count = token_i; \
     *S_ptr = S; S_ptr->__pc__ = -1; return(n); }
 
-enum Lex_Result{
-    LexFinished,
-    LexNeedChunk,
-    LexNeedTokenMemory,
-    LexHitTokenLimit
-};
-
-FCPP_LINK int32_t
-cpp_lex_nonalloc(Lex_Data *S_ptr,
-                 char *chunk, int32_t size,
-                 Cpp_Token_Stack *token_stack_out){
-    Lex_Data S = *S_ptr;
+FCPP_INTERNAL Cpp_Lex_Result
+cpp_lex_nonalloc_null_end_no_limit(Cpp_Lex_Data *S_ptr, char *chunk, int32_t size,
+                                   Cpp_Token_Array *token_array_out){
+    Cpp_Lex_Data S = *S_ptr;
     
-    Cpp_Token *out_tokens = token_stack_out->tokens;
-    int32_t token_i = token_stack_out->count;
-    int32_t max_token_i = token_stack_out->max_count;
-    
-    //Pos_Update_Rule pos_update_rule = PUR_none;
+    Cpp_Token *out_tokens = token_array_out->tokens;
+    int32_t token_i = token_array_out->count;
+    int32_t max_token_i = token_array_out->max_count;
     
     char c = 0;
@@ -362,7 +323,7 @@ cpp_lex_nonalloc(Lex_Data *S_ptr,
 
             if (S.white_done == 0){
                 S.chunk_pos += size;
-                DrYield(4, LexNeedChunk);
+                DrYield(4, LexResult_NeedChunk);
             }
             else break;
         }
@@ -394,7 +355,7 @@ cpp_lex_nonalloc(Lex_Data *S_ptr,
 
             if (S.fsm.emit_token == 0){
                 S.chunk_pos += size;
-                DrYield(3, LexNeedChunk);
+                DrYield(3, LexResult_NeedChunk);
             }
             else break;
         }
@@ -449,7 +410,7 @@ cpp_lex_nonalloc(Lex_Data *S_ptr,
 
             if (S.white_done == 0){
                 S.chunk_pos += size;
-                DrYield(1, LexNeedChunk);
+                DrYield(1, LexResult_NeedChunk);
             }
             else break;
         }
@@ -560,7 +521,7 @@ cpp_lex_nonalloc(Lex_Data *S_ptr,
 
             if (S.fsm.emit_token == 0){
                 S.chunk_pos += size;
-                DrYield(5, LexNeedChunk);
+                DrYield(5, LexResult_NeedChunk);
             }
             else break;
         }
@@ -905,13 +866,13 @@ cpp_lex_nonalloc(Lex_Data *S_ptr,
                 S.token.flags |= (S.pp_state != LSPP_default)?(CPP_TFLAG_PP_BODY):(0);
             }
             
-            token_i = cpp_place_token_nonalloc(out_tokens, token_i, S.token);
+            out_tokens[token_i++] = S.token;
             if (token_i == max_token_i){
                 if (S.pos == end_pos){
                     S.chunk_pos += size;
-                    DrYield(7, LexNeedChunk);
+                    DrYield(7, LexResult_NeedChunk);
                 }
-                DrYield(2, LexNeedTokenMemory);
+                DrYield(2, LexResult_NeedTokenMemory);
             }
         }
@@ -920,109 +881,203 @@ cpp_lex_nonalloc(Lex_Data *S_ptr,
         }
     }
     
-    DrReturn(LexFinished);
+    DrReturn(LexResult_Finished);
 }
 
 #undef DrYield
 #undef DrReturn
 #undef DrCase
 
-FCPP_LINK int32_t
-cpp_lex_nonalloc(Lex_Data *S_ptr,
-                 char *chunk, int32_t size,
-                 Cpp_Token_Stack *token_stack_out, int32_t max_tokens){
-    Cpp_Token_Stack temp_stack = *token_stack_out;
-    if (temp_stack.max_count > temp_stack.count + max_tokens){
-        temp_stack.max_count = temp_stack.count + max_tokens;
-    }
-    
-    int32_t result = cpp_lex_nonalloc(S_ptr, chunk, size, &temp_stack);
-    
-    token_stack_out->count = temp_stack.count;
-    
-    if (result == LexNeedTokenMemory){
-        if (token_stack_out->count < token_stack_out->max_count){
-            result = LexHitTokenLimit;
-        }
-    }
-    
-    return(result);
-}
+FCPP_INTERNAL Cpp_Lex_Result
+cpp_lex_nonalloc_null_end_out_limit(Cpp_Lex_Data *S_ptr, char *chunk, int32_t size,
+                                    Cpp_Token_Array *token_array_out, int32_t max_tokens_out){
+    Cpp_Token_Array temp_array = *token_array_out;
+    if (temp_array.max_count > temp_array.count + max_tokens_out){
+        temp_array.max_count = temp_array.count + max_tokens_out;
+    }
+    
+    Cpp_Lex_Result result = cpp_lex_nonalloc_null_end_no_limit(S_ptr, chunk, size, &temp_array);
+    
+    token_array_out->count = temp_array.count;
+    if (result == LexResult_NeedTokenMemory){
+        if (token_array_out->count < token_array_out->max_count){
+            result = LexResult_HitTokenLimit;
+        }
+    }
+    
+    return(result);
+}
 
-FCPP_LINK int32_t
-cpp_lex_size_nonalloc(Lex_Data *S_ptr,
-                      char *chunk, int32_t size, int32_t full_size,
-                      Cpp_Token_Stack *token_stack_out){
-    int32_t result = 0;
+FCPP_INTERNAL Cpp_Lex_Result
+cpp_lex_nonalloc_no_null_no_limit(Cpp_Lex_Data *S_ptr, char *chunk, int32_t size, int32_t full_size,
+                                  Cpp_Token_Array *token_array_out){
+    Cpp_Lex_Result result = 0;
     if (S_ptr->pos >= full_size){
         char end_null = 0;
-        result = cpp_lex_nonalloc(S_ptr, &end_null, 1, token_stack_out);
+        result = cpp_lex_nonalloc_null_end_no_limit(S_ptr, &end_null, 1, token_array_out);
     }
     else{
-        result = cpp_lex_nonalloc(S_ptr, chunk, size, token_stack_out);
-        if (result == LexNeedChunk){
+        result = cpp_lex_nonalloc_null_end_no_limit(S_ptr, chunk, size, token_array_out);
+        if (result == LexResult_NeedChunk){
            if (S_ptr->pos >= full_size){
                char end_null = 0;
-                result = cpp_lex_nonalloc(S_ptr, &end_null, 1, token_stack_out);
+                result = cpp_lex_nonalloc_null_end_no_limit(S_ptr, &end_null, 1, token_array_out);
            }
        }
    }
    return(result);
}
 
-FCPP_LINK int32_t
-cpp_lex_size_nonalloc(Lex_Data *S_ptr,
-                      char *chunk, int32_t size, int32_t full_size,
-                      Cpp_Token_Stack *token_stack_out, int32_t max_tokens){
-    Cpp_Token_Stack temp_stack = *token_stack_out;
-    if (temp_stack.max_count > temp_stack.count + max_tokens){
-        temp_stack.max_count = temp_stack.count + max_tokens;
-    }
-    
-    int32_t result = cpp_lex_size_nonalloc(S_ptr, chunk, size, full_size,
-                                           &temp_stack);
-    
-    token_stack_out->count = temp_stack.count;
-    
-    if (result == LexNeedTokenMemory){
-        if (token_stack_out->count < token_stack_out->max_count){
-            result = LexHitTokenLimit;
-        }
-    }
-    
-    return(result);
-}
+FCPP_INTERNAL Cpp_Lex_Result
+cpp_lex_nonalloc_no_null_out_limit(Cpp_Lex_Data *S_ptr, char *chunk, int32_t size, int32_t full_size,
+                                   Cpp_Token_Array *token_array_out, int32_t max_tokens_out){
+    Cpp_Token_Array temp_stack = *token_array_out;
+    if (temp_stack.max_count > temp_stack.count + max_tokens_out){
+        temp_stack.max_count = temp_stack.count + max_tokens_out;
+    }
+    
+    Cpp_Lex_Result result = cpp_lex_nonalloc_no_null_no_limit(S_ptr, chunk, size, full_size,
+                                                              &temp_stack);
+    
+    token_array_out->count = temp_stack.count;
+    
+    if (result == LexResult_NeedTokenMemory){
+        if (token_array_out->count < token_array_out->max_count){
+            result = LexResult_HitTokenLimit;
+        }
+    }
+    
+    return(result);
+}
 
-FCPP_LINK Cpp_Relex_State
-cpp_relex_nonalloc_start(char *data, int32_t size, Cpp_Token_Stack *stack,
+#define HAS_NULL_TERM ((int32_t)(-1))
+#define NO_OUT_LIMIT ((int32_t)(-1))
+
+FCPP_LINK Cpp_Lex_Result
+cpp_lex_nonalloc(Cpp_Lex_Data *S_ptr, char *chunk, int32_t size, int32_t full_size,
+                 Cpp_Token_Array *token_array_out, int32_t max_tokens_out)/*
+DOC_PARAM(S_ptr, The lexer state. Go to the Cpp_Lex_Data section to see how to initialize the state.)
+DOC_PARAM(chunk, The first or next chunk of the file being lexed.)
+DOC_PARAM(size, The number of bytes in the chunk including the null terminator if the chunk ends in a null terminator.
+If the chunk ends in a null terminator the system will interpret it as the end of the file.)
+DOC_PARAM(full_size, If the final chunk is not null terminated this parameter should specify the length of the
+file in bytes. To rely on an eventual null terminator use HAS_NULL_TERM for this parameter.)
+DOC_PARAM(token_array_out, The token array structure that will receive the tokens output by the lexer.)
+DOC_PARAM(max_tokens_out, The maximum number of tokens to be output to the token array. To rely on the
+max built into the token array pass NO_OUT_LIMIT here.)
+
+DOC(This call is the primary interface of the lexing system. It is quite general so it can be used in
+a lot of different ways. I will explain the general rules first, and then give some examples of common
+ways it might be used.
+
+First a lexing state, Cpp_Lex_Data, must be initialized. The file to lex must be read into N contiguous chunks
+of memory. An output Cpp_Token_Array must be allocated and initialized with the appropriate count and max_count values.
+Then each chunk of the file must be passed to cpp_lex_nonalloc in order using the same lexing state for each call.
+Every time a call to cpp_lex_nonalloc returns LexResult_NeedChunk, the next call to cpp_lex_nonalloc should use the
+next chunk. If the return is some other value, the lexer hasn't finished with the current chunk and it stopped for some
+other reason, so the same chunk should be used again in the next call.
+
+If the file chunks contain a null terminator the lexer will return LexResult_Finished when it finds this character.
+At this point calling the lexer again with the same state will result in an error. If you do not have a null
+terminated chunk to end the file, you may instead pass the exact size in bytes of the entire file to the full_size
+parameter and it will automatically handle the termination of the lexing state when it has read that many bytes.
+If a full_size is specified and the system terminates for having seen that many bytes, it will return LexResult_Finished.
+If a full_size is specified and a null character is read before the total number of bytes have been read the system will
+still terminate as usual and return LexResult_Finished.
+
+If the system has filled the entire output array it will return LexResult_NeedTokenMemory. When this happens if you
+want to continue lexing the file you can grow the token array, or switch to a new output array and then call
+cpp_lex_nonalloc again with the chunk that was being lexed and the new output. You can also specify a max_tokens_out
+which limits how many new tokens will be added to the token array. Even if token_array_out still had more space
+to hold tokens, if the max_tokens_out limit is hit, the lexer will stop and return LexResult_HitTokenLimit. If this
+happens there is still space left in the token array, so you can resume simply by calling cpp_lex_nonalloc again with
+the same chunk and the same output array. Also note that, unlike the chunks, which must only be replaced when the system
+says it needs a chunk, you may switch to or modify the output array in between calls as much as you like.
+
+The most basic use of this system is to get it all done in one big chunk and try to allocate a nearly "infinite" output
+array so that it will not run out of memory. This way you can get the entire job done in one call and then just assert
+to make sure it returns LexResult_Finished to you:
+CODE_EXAMPLE(
+Cpp_Token_Array lex_file(char *file_name){
+    File_Data file = read_whole_file(file_name);
+    
+    Cpp_Lex_Data lex_state =
+        cpp_lex_data_init((char*)malloc(4096)); // hopefully big enough
+    
+    Cpp_Token_Array array = {0};
+    array.tokens = (Cpp_Token*)malloc(1 << 20); // hopefully big enough
+    array.max_count = (1 << 20)/sizeof(Cpp_Token);
+    
+    Cpp_Lex_Result result =
+        cpp_lex_nonalloc(&lex_state, file.data, file.size, file.size,
+                         &array, NO_OUT_LIMIT);
+    Assert(result == LexResult_Finished);
+    
+    free(lex_state.tb);
+    
+    return(array);
+})
+)
+
+DOC_SEE(Cpp_Lex_Data)
+DOC_SEE(cpp_lex_file)
+DOC_SEE(cpp_lex_nonalloc_null_end_no_limit)
+DOC_SEE(cpp_lex_nonalloc_no_null_no_limit)
+DOC_SEE(cpp_lex_nonalloc_null_end_out_limit)
+DOC_SEE(cpp_lex_nonalloc_no_null_out_limit)
+*/{
+    Cpp_Lex_Result result = 0;
+    if (full_size == HAS_NULL_TERM){
+        if (max_tokens_out == NO_OUT_LIMIT){
+            result = cpp_lex_nonalloc_null_end_no_limit(S_ptr, chunk, size, token_array_out);
+        }
+        else{
+            result = cpp_lex_nonalloc_null_end_out_limit(S_ptr, chunk, size, token_array_out, max_tokens_out);
+        }
+    }
+    else{
+        if (max_tokens_out == NO_OUT_LIMIT){
+            result = cpp_lex_nonalloc_no_null_no_limit(S_ptr, chunk, size, full_size, token_array_out);
+        }
+        else{
+            result = cpp_lex_nonalloc_no_null_out_limit(S_ptr, chunk, size, full_size, token_array_out, max_tokens_out);
+        }
+    }
+    return(result);
+}
+
+// TODO(allen): Get the relex system ready to work in chunks.
+FCPP_INTERNAL Cpp_Relex_State
+cpp_relex_nonalloc_start(char *data, int32_t size, Cpp_Token_Array *array,
                          int32_t start, int32_t end, int32_t amount, int32_t tolerance){
     Cpp_Relex_State state;
     state.data = data;
     state.size = size;
-    state.stack = stack;
+    state.array = array;
     state.start = start;
     state.end = end;
     state.amount = amount;
     state.tolerance = tolerance;
     
-    Cpp_Get_Token_Result result = cpp_get_token(stack, start);
+    Cpp_Get_Token_Result result = cpp_get_token(array, start);
     
     state.start_token_i = result.token_index-1;
     if (state.start_token_i < 0){
         state.start_token_i = 0;
     }
     
-    result = cpp_get_token(stack, end);
+    result = cpp_get_token(array, end);
     
     state.end_token_i = result.token_index;
-    if (end > stack->tokens[state.end_token_i].start){
+    if (end > array->tokens[state.end_token_i].start){
         ++state.end_token_i;
     }
     if (state.end_token_i < 0){
         state.end_token_i = 0;
     }
     
-    state.relex_start = stack->tokens[state.start_token_i].start;
+    state.relex_start = array->tokens[state.start_token_i].start;
     if (start < state.relex_start){
         state.relex_start = start;
     }
|
||||||
return(state);
|
return(state);
|
||||||
}
|
}
|
||||||
|
|
||||||
FCPP_LINK char
|
FCPP_INTERNAL char
|
||||||
cpp_token_get_pp_state(uint16_t bitfield){
|
cpp_token_get_pp_state(uint16_t bitfield){
|
||||||
return (char)(bitfield);
|
return (char)(bitfield);
|
||||||
}
|
}
|
||||||
|
|
||||||
// TODO(allen): Eliminate this once we actually store the EOF token
|
// TODO(allen): Eliminate this once we actually store the EOF token
|
||||||
// in the token stack.
|
// in the token stack.
|
||||||
FCPP_LINK Cpp_Token
|
FCPP_INTERNAL Cpp_Token
|
||||||
cpp_index_stack(Cpp_Token_Stack *stack, int32_t file_size, int32_t index){
|
cpp_index_array(Cpp_Token_Array *array, int32_t file_size, int32_t index){
|
||||||
Cpp_Token result;
|
Cpp_Token result;
|
||||||
if (index < stack->count){
|
if (index < array->count){
|
||||||
result = stack->tokens[index];
|
result = array->tokens[index];
|
||||||
}
|
}
|
||||||
else{
|
else{
|
||||||
result.start = file_size;
|
result.start = file_size;
|
||||||
|
@ -1055,60 +1110,68 @@ cpp_index_stack(Cpp_Token_Stack *stack, int32_t file_size, int32_t index){
|
||||||
return(result);
|
return(result);
|
||||||
}
|
}
|
||||||
|
|
||||||
FCPP_LINK int32_t
|
FCPP_INTERNAL void
|
||||||
|
cpp_shift_token_starts(Cpp_Token_Array *array, int32_t from_token_i, int32_t shift_amount){
|
||||||
|
Cpp_Token *token = array->tokens + from_token_i;
|
||||||
|
int32_t count = array->count, i = 0;
|
||||||
|
|
||||||
|
for (i = from_token_i; i < count; ++i, ++token){
|
||||||
|
token->start += shift_amount;
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
FCPP_INTERNAL int32_t
|
||||||
cpp_relex_nonalloc_main(Cpp_Relex_State *state,
|
cpp_relex_nonalloc_main(Cpp_Relex_State *state,
|
||||||
Cpp_Token_Stack *relex_stack,
|
Cpp_Token_Array *relex_array,
|
||||||
int32_t *relex_end,
|
int32_t *relex_end,
|
||||||
char *spare){
|
char *spare){
|
||||||
Cpp_Token_Stack *stack = state->stack;
|
Cpp_Token_Array *array = state->array;
|
||||||
Cpp_Token *tokens = stack->tokens;
|
Cpp_Token *tokens = array->tokens;
|
||||||
|
|
||||||
cpp_shift_token_starts(stack, state->end_token_i, state->amount);
|
cpp_shift_token_starts(array, state->end_token_i, state->amount);
|
||||||
|
|
||||||
Lex_Data lex = lex_data_init(spare);
|
Cpp_Lex_Data lex = cpp_lex_data_init(spare);
|
||||||
lex.pp_state = cpp_token_get_pp_state(tokens[state->start_token_i].state_flags);
|
lex.pp_state = cpp_token_get_pp_state(tokens[state->start_token_i].state_flags);
|
||||||
lex.pos = state->relex_start;
|
lex.pos = state->relex_start;
|
||||||
|
|
||||||
int32_t relex_end_i = state->end_token_i;
|
int32_t relex_end_i = state->end_token_i;
|
||||||
Cpp_Token match_token = cpp_index_stack(stack, state->size, relex_end_i);
|
Cpp_Token match_token = cpp_index_array(array, state->size, relex_end_i);
|
||||||
Cpp_Token end_token = match_token;
|
Cpp_Token end_token = match_token;
|
||||||
int32_t went_too_far = false;
|
int32_t went_too_far = false;
|
||||||
|
|
||||||
// TODO(allen): This can be better I suspect.
|
// TODO(allen): This can be better I suspect.
|
||||||
for (;;){
|
for (;;){
|
||||||
int32_t result =
|
int32_t result =
|
||||||
cpp_lex_size_nonalloc(&lex,
|
cpp_lex_nonalloc_no_null_out_limit(&lex, state->data,
|
||||||
state->data,
|
state->size, state->size,
|
||||||
state->size,
|
relex_array, 1);
|
||||||
state->size,
|
|
||||||
relex_stack, 1);
|
|
||||||
|
|
||||||
switch (result){
|
switch (result){
|
||||||
case LexHitTokenLimit:
|
case LexResult_HitTokenLimit:
|
||||||
{
|
{
|
||||||
Cpp_Token token = relex_stack->tokens[relex_stack->count-1];
|
Cpp_Token token = relex_array->tokens[relex_array->count-1];
|
||||||
if (token.start == end_token.start &&
|
if (token.start == end_token.start &&
|
||||||
token.size == end_token.size &&
|
token.size == end_token.size &&
|
||||||
token.flags == end_token.flags &&
|
token.flags == end_token.flags &&
|
||||||
token.state_flags == end_token.state_flags){
|
token.state_flags == end_token.state_flags){
|
||||||
--relex_stack->count;
|
--relex_array->count;
|
||||||
goto double_break;
|
goto double_break;
|
||||||
}
|
}
|
||||||
|
|
||||||
while (lex.pos > end_token.start && relex_end_i < stack->count){
|
while (lex.pos > end_token.start && relex_end_i < array->count){
|
||||||
++relex_end_i;
|
++relex_end_i;
|
||||||
end_token = cpp_index_stack(stack, state->size, relex_end_i);
|
end_token = cpp_index_array(array, state->size, relex_end_i);
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
break;
|
break;
|
||||||
|
|
||||||
case LexNeedChunk: Assert(!"Invalid path"); break;
|
case LexResult_NeedChunk: Assert(!"Invalid path"); break;
|
||||||
|
|
||||||
case LexNeedTokenMemory:
|
case LexResult_NeedTokenMemory:
|
||||||
went_too_far = true;
|
went_too_far = true;
|
||||||
goto double_break;
|
goto double_break;
|
||||||
|
|
||||||
case LexFinished:
|
case LexResult_Finished:
|
||||||
goto double_break;
|
goto double_break;
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
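
For orientation, the intended driving pattern for this relex pair shows up in file_relex_parallel later in this commit; distilled into a sketch (the scratch allocations are illustrative placeholders):

    Cpp_Relex_State state =
        cpp_relex_nonalloc_start(data, size, &array,
                                 start_i, end_i, amount, 100);
    
    Cpp_Token_Array relex_space;
    relex_space.count = 0;
    relex_space.max_count = state.space_request;
    relex_space.tokens = /* scratch space for max_count tokens */;
    
    int32_t relex_end = 0;
    char *spare = /* temp buffer for the lexer */;
    if (cpp_relex_nonalloc_main(&state, &relex_space, &relex_end, spare)){
        // Went past the tolerance: fall back to a full lex of the file.
    }
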
@@ -1118,78 +1181,117 @@ cpp_relex_nonalloc_main(Cpp_Relex_State *state,
         *relex_end = relex_end_i;
     }
     else{
-        cpp_shift_token_starts(stack, state->end_token_i, -state->amount);
+        cpp_shift_token_starts(array, state->end_token_i, -state->amount);
     }
     
     return(went_too_far);
 }
 
 
 #if defined(FCPP_ALLOW_MALLOC)
 
 #include <stdlib.h>
 #include <string.h>
 
-FCPP_LINK Cpp_Token_Stack
-cpp_make_token_stack(int32_t starting_max){
-    Cpp_Token_Stack token_stack;
-    token_stack.count = 0;
-    token_stack.max_count = starting_max;
-    token_stack.tokens = (Cpp_Token*)malloc(sizeof(Cpp_Token)*starting_max);
-    return(token_stack);
+FCPP_LINK Cpp_Token_Array
+cpp_make_token_array(int32_t starting_max)/*
+DOC_PARAM(starting_max, The number of tokens to initialize the array with.)
+DOC_RETURN(An empty Cpp_Token_Array with memory malloc'd for storing tokens.)
+DOC(This call allocates a Cpp_Token_Array with malloc for use in other
+convenience functions. Arrays that are not allocated this way should not be
+used in the convenience functions.
+
+This call is a part of the FCPP_ALLOW_MALLOC convenience functions.
+If you want to use it define the macro FCPP_ALLOW_MALLOC before including 4cpp_lexer.h)
+*/{
+    Cpp_Token_Array token_array;
+    token_array.tokens = (Cpp_Token*)malloc(sizeof(Cpp_Token)*starting_max);
+    token_array.count = 0;
+    token_array.max_count = starting_max;
+    return(token_array);
 }
 
 FCPP_LINK void
-cpp_free_token_stack(Cpp_Token_Stack token_stack){
-    free(token_stack.tokens);
+cpp_free_token_array(Cpp_Token_Array token_array)/*
+DOC_PARAM(token_array, An array previously allocated by cpp_make_token_array)
+DOC(This call frees a Cpp_Token_Array.
+
+This call is a part of the FCPP_ALLOW_MALLOC convenience functions.
+If you want to use it define the macro FCPP_ALLOW_MALLOC before including 4cpp_lexer.h)
+DOC_SEE(cpp_make_token_array)
+*/{
+    free(token_array.tokens);
 }
 
 FCPP_LINK void
-cpp_resize_token_stack(Cpp_Token_Stack *token_stack, int32_t new_max){
-    Cpp_Token *new_tokens = (Cpp_Token*)malloc(sizeof(Cpp_Token)*new_max);
-    
-    if (new_tokens){
-        memcpy(new_tokens, token_stack->tokens, sizeof(Cpp_Token)*token_stack->count);
-        free(token_stack->tokens);
-        token_stack->tokens = new_tokens;
-        token_stack->max_count = new_max;
-    }
-}
-
-FCPP_LINK void
-cpp_push_token(Cpp_Token_Stack *token_stack, Cpp_Token token){
-    if (!cpp_push_token_nonalloc(token_stack, token)){
-        int32_t new_max = 2*token_stack->max_count + 1;
-        cpp_resize_token_stack(token_stack, new_max);
-        cpp_push_token_nonalloc(token_stack, token);
+cpp_resize_token_array(Cpp_Token_Array *token_array, int32_t new_max)/*
+DOC_PARAM(token_array, An array previously allocated by cpp_make_token_array.)
+DOC_PARAM(new_max, The new maximum size the array should support. If this is not greater
+than the current size of the array the operation is ignored.)
+DOC(This call allocates a new memory chunk and moves the existing tokens in the array
+over to the new chunk.
+
+This call is a part of the FCPP_ALLOW_MALLOC convenience functions.
+If you want to use it define the macro FCPP_ALLOW_MALLOC before including 4cpp_lexer.h)
+DOC_SEE(cpp_make_token_array)
+*/{
+    if (new_max > token_array->count){
+        Cpp_Token *new_tokens = (Cpp_Token*)malloc(sizeof(Cpp_Token)*new_max);
+        
+        if (new_tokens){
+            memcpy(new_tokens, token_array->tokens, sizeof(Cpp_Token)*token_array->count);
+            free(token_array->tokens);
+            token_array->tokens = new_tokens;
+            token_array->max_count = new_max;
+        }
     }
 }
 
 FCPP_LINK void
-cpp_lex_file(char *data, int32_t size, Cpp_Token_Stack *token_stack_out){
-    Lex_Data S = {0};
+cpp_lex_file(char *data, int32_t size, Cpp_Token_Array *token_array_out)/*
+DOC_PARAM(data, The file data to be lexed in a single contiguous block.)
+DOC_PARAM(size, The number of bytes in data.)
+DOC_PARAM(token_array_out, The token array where the output tokens will be pushed.
+This token array must be previously allocated with cpp_make_token_array)
+DOC(Lexes an entire file and manages the interaction with the lexer system so that
+it is quick and convenient to lex files.
+
+This call is a part of the FCPP_ALLOW_MALLOC convenience functions.
+If you want to use it define the macro FCPP_ALLOW_MALLOC before including 4cpp_lexer.h)
+DOC_SEE(cpp_make_token_array)
+*/{
+    Cpp_Lex_Data S = {0};
     S.tb = (char*)malloc(size);
     int32_t quit = 0;
     
-    token_stack_out->count = 0;
+    token_array_out->count = 0;
     for (;!quit;){
-        int32_t result = cpp_lex_nonalloc(&S, data, size, token_stack_out);
+        int32_t result = cpp_lex_nonalloc(&S, data, size, HAS_NULL_TERM, token_array_out, NO_OUT_LIMIT);
        switch (result){
-            case LexFinished:
+            case LexResult_Finished:
            {
                quit = 1;
            }break;
            
-            case LexNeedChunk:
+            case LexResult_NeedChunk:
            {
+                Assert(token_array_out->count < token_array_out->max_count);
+                
+                // NOTE(allen): We told the system we would provide the null
+                // terminator, but we didn't actually, so provide the null
+                // terminator via this one byte chunk.
                char empty = 0;
-                cpp_lex_nonalloc(&S, &empty, 1, token_stack_out);
-                quit = 1;
+                cpp_lex_nonalloc(&S, &empty, 1, HAS_NULL_TERM, token_array_out, NO_OUT_LIMIT);
            }break;
            
-            case LexNeedTokenMemory:
+            case LexResult_NeedTokenMemory:
            {
-                int32_t new_max = 2*token_stack_out->max_count + 1;
-                cpp_resize_token_stack(token_stack_out, new_max);
+                // NOTE(allen): We told the system to use all of the output memory
+                // but we ran out anyway, so allocate more memory. We hereby assume
+                // the array was allocated using cpp_make_token_array.
+                int32_t new_max = 2*token_array_out->max_count + 1;
+                cpp_resize_token_array(token_array_out, new_max);
            }break;
        }
    }
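
With FCPP_ALLOW_MALLOC defined before the include, the whole convenience path reduces to a few lines (a sketch; file loading and error handling omitted):

    #define FCPP_ALLOW_MALLOC
    #include "4cpp_lexer.h"
    
    Cpp_Token_Array tokens = cpp_make_token_array(1024);
    cpp_lex_file(file_data, file_size, &tokens);
    // tokens.tokens[0 .. tokens.count-1] now hold the lexed file.
    cpp_free_token_array(tokens);
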
4cpp_lexer_fsms.h (deleted)

@@ -1,97 +0,0 @@
-/*
- * FSMs for 4cpp lexer
- *
- * 23.03.2016 (dd.mm.yyyy)
- *
- */
-
-// TOP
-
-#if !defined(FCPP_LEXER_FSMS_H)
-#define FCPP_LEXER_FSMS_H
-
-enum Lex_State{
-    LS_default,
-    LS_identifier,
-    LS_pound,
-    LS_pp,
-    LS_ppdef,
-    LS_char,
-    LS_char_multiline,
-    LS_char_slashed,
-    LS_string,
-    LS_string_multiline,
-    LS_string_slashed,
-    LS_number,
-    LS_number0,
-    LS_float,
-    LS_crazy_float0,
-    LS_crazy_float1,
-    LS_hex,
-    LS_comment_pre,
-    LS_comment,
-    LS_comment_slashed,
-    LS_comment_block,
-    LS_comment_block_ending,
-    LS_dot,
-    LS_ellipsis,
-    LS_less,
-    LS_less_less,
-    LS_more,
-    LS_more_more,
-    LS_minus,
-    LS_arrow,
-    LS_and,
-    LS_or,
-    LS_plus,
-    LS_colon,
-    LS_star,
-    LS_modulo,
-    LS_caret,
-    LS_eq,
-    LS_bang,
-    LS_error_message,
-    //
-    LS_count
-};
-
-enum Lex_Int_State{
-    LSINT_default,
-    LSINT_u,
-    LSINT_l,
-    LSINT_L,
-    LSINT_ul,
-    LSINT_uL,
-    LSINT_ll,
-    LSINT_extra,
-    //
-    LSINT_count
-};
-
-enum Lex_PP_State{
-    LSPP_default,
-    LSPP_include,
-    LSPP_macro_identifier,
-    LSPP_identifier,
-    LSPP_body_if,
-    LSPP_body,
-    LSPP_number,
-    LSPP_error,
-    LSPP_junk,
-    //
-    LSPP_count
-};
-
-struct Lex_FSM{
-    uint8_t state;
-    uint8_t int_state;
-    uint8_t emit_token;
-    uint8_t multi_line;
-};
-static Lex_FSM null_lex_fsm = {0};
-
-#endif
-
-// BOTTOM
4cpp_lexer_types.h

@@ -8,6 +8,10 @@
 #define ENUM(type,name) typedef type name; enum name##_
 #endif
 
+#ifndef INTERNAL_ENUM
+#define INTERNAL_ENUM(type,name) typedef type name; enum name##_
+#endif
+
 /* DOC(A Cpp_Token_Type classifies a token to make parsing easier. Some types are not
 actually output by the lexer, but exist because parsers will also make use of token
 types in their own output.) */
@@ -255,11 +259,11 @@ ENUM(uint16_t, Cpp_Preprocessor_State){
     CPP_LEX_PP_COUNT
 };
 
-struct Cpp_Token_Stack{
+struct Cpp_Token_Array{
     Cpp_Token *tokens;
     int32_t count, max_count;
 };
-static Cpp_Token_Stack null_cpp_token_stack = {0};
+static Cpp_Token_Array null_cpp_token_array = {0};
 
 struct Cpp_Get_Token_Result{
     int32_t token_index;
@@ -270,7 +274,7 @@ struct Cpp_Relex_State{
     char *data;
     int32_t size;
     
-    Cpp_Token_Stack *stack;
+    Cpp_Token_Array *array;
     int32_t start, end, amount;
     int32_t start_token_i;
     int32_t end_token_i;
@@ -279,6 +283,112 @@ struct Cpp_Relex_State{
     int32_t space_request;
 };
 
+struct Cpp_Lex_FSM{
+    uint8_t state;
+    uint8_t int_state;
+    uint8_t emit_token;
+    uint8_t multi_line;
+};
+static Cpp_Lex_FSM null_lex_fsm = {0};
+
+struct Cpp_Lex_Data{
+    char *tb;
+    int32_t tb_pos;
+    int32_t token_start;
+    
+    int32_t pos;
+    int32_t pos_overide;
+    int32_t chunk_pos;
+    
+    Cpp_Lex_FSM fsm;
+    uint8_t white_done;
+    uint8_t pp_state;
+    uint8_t completed;
+    
+    Cpp_Token token;
+    
+    int32_t __pc__;
+};
+
+ENUM(int32_t, Cpp_Lex_Result){
+    LexResult_Finished,
+    LexResult_NeedChunk,
+    LexResult_NeedTokenMemory,
+    LexResult_HitTokenLimit,
+};
+
+INTERNAL_ENUM(uint8_t, Cpp_Lex_State){
+    LS_default,
+    LS_identifier,
+    LS_pound,
+    LS_pp,
+    LS_ppdef,
+    LS_char,
+    LS_char_multiline,
+    LS_char_slashed,
+    LS_string,
+    LS_string_multiline,
+    LS_string_slashed,
+    LS_number,
+    LS_number0,
+    LS_float,
+    LS_crazy_float0,
+    LS_crazy_float1,
+    LS_hex,
+    LS_comment_pre,
+    LS_comment,
+    LS_comment_slashed,
+    LS_comment_block,
+    LS_comment_block_ending,
+    LS_dot,
+    LS_ellipsis,
+    LS_less,
+    LS_less_less,
+    LS_more,
+    LS_more_more,
+    LS_minus,
+    LS_arrow,
+    LS_and,
+    LS_or,
+    LS_plus,
+    LS_colon,
+    LS_star,
+    LS_modulo,
+    LS_caret,
+    LS_eq,
+    LS_bang,
+    LS_error_message,
+    //
+    LS_count
+};
+
+INTERNAL_ENUM(uint8_t, Cpp_Lex_Int_State){
+    LSINT_default,
+    LSINT_u,
+    LSINT_l,
+    LSINT_L,
+    LSINT_ul,
+    LSINT_uL,
+    LSINT_ll,
+    LSINT_extra,
+    //
+    LSINT_count
+};
+
+INTERNAL_ENUM(uint8_t, Cpp_Lex_PP_State){
+    LSPP_default,
+    LSPP_include,
+    LSPP_macro_identifier,
+    LSPP_identifier,
+    LSPP_body_if,
+    LSPP_body,
+    LSPP_number,
+    LSPP_error,
+    LSPP_junk,
+    //
+    LSPP_count
+};
+
 #endif
 
 // BOTTOM
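
Every lexer driver ends up dispatching on the new Cpp_Lex_Result codes added here; a minimal sketch of that switch:

    switch (result){
        case LexResult_Finished:        /* all tokens emitted */              break;
        case LexResult_NeedChunk:       /* feed the next chunk of the file */ break;
        case LexResult_NeedTokenMemory: /* grow or swap the output array */   break;
        case LexResult_HitTokenLimit:   /* max_tokens_out was reached */      break;
    }
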
4ed.cpp
@@ -502,7 +502,7 @@ case_change_range(System_Functions *system,
         }
     }
     
-    if (file->state.token_stack.tokens)
+    if (file->state.token_array.tokens)
         file_relex_parallel(system, mem, file, range.start, range.end, 0);
 }
 #endif
@@ -541,7 +541,7 @@ DOC_SEE(Access_Flag)
 }
 
 internal i32
-seek_token_left(Cpp_Token_Stack *tokens, i32 pos){
+seek_token_left(Cpp_Token_Array *tokens, i32 pos){
     Cpp_Get_Token_Result get = cpp_get_token(tokens, pos);
     if (get.token_index == -1){
         get.token_index = 0;
@@ -556,7 +556,7 @@ seek_token_left(Cpp_Token_Stack *tokens, i32 pos){
 }
 
 internal i32
-seek_token_right(Cpp_Token_Stack *tokens, i32 pos){
+seek_token_right(Cpp_Token_Array *tokens, i32 pos){
     Cpp_Get_Token_Result get = cpp_get_token(tokens, pos);
     if (get.in_whitespace){
         ++get.token_index;
@@ -609,7 +609,7 @@ DOC_SEE(4coder_Buffer_Positioning_System)
     
     if (flags & (1 << 1)){
         if (file->state.tokens_complete){
-            pos[1] = seek_token_right(&file->state.token_stack, start_pos);
+            pos[1] = seek_token_right(&file->state.token_array, start_pos);
         }
         else{
             pos[1] = buffer_seek_whitespace_right(&file->state.buffer, start_pos);
@@ -640,7 +640,7 @@ DOC_SEE(4coder_Buffer_Positioning_System)
     
     if (flags & (1 << 1)){
         if (file->state.tokens_complete){
-            pos[1] = seek_token_left(&file->state.token_stack, start_pos);
+            pos[1] = seek_token_left(&file->state.token_array, start_pos);
         }
         else{
             pos[1] = buffer_seek_whitespace_left(&file->state.buffer, start_pos);
@@ -942,7 +942,7 @@ DOC_SEE(4coder_Buffer_Positioning_System)
     bool32 result = false;
     
     Editing_File *file = imp_get_file(cmd, buffer);
-    if (file && file->state.token_stack.tokens &&
+    if (file && file->state.token_array.tokens &&
         file->state.tokens_complete && !file->state.still_lexing){
         result = true;
@@ -135,8 +135,8 @@ struct Editing_File_State{
     
     Undo_Data undo;
     
-    Cpp_Token_Stack token_stack;
-    Cpp_Token_Stack swap_stack;
+    Cpp_Token_Array token_array;
+    Cpp_Token_Array swap_array;
     u32 lex_job;
     b32 tokens_complete;
     b32 still_lexing;
@@ -1083,13 +1083,13 @@ internal void
 file_close(System_Functions *system, General_Memory *general, Editing_File *file){
     if (file->state.still_lexing){
         system->cancel_job(BACKGROUND_THREADS, file->state.lex_job);
-        if (file->state.swap_stack.tokens){
-            general_memory_free(general, file->state.swap_stack.tokens);
-            file->state.swap_stack.tokens = 0;
+        if (file->state.swap_array.tokens){
+            general_memory_free(general, file->state.swap_array.tokens);
+            file->state.swap_array.tokens = 0;
         }
     }
-    if (file->state.token_stack.tokens){
-        general_memory_free(general, file->state.token_stack.tokens);
+    if (file->state.token_array.tokens){
+        general_memory_free(general, file->state.token_array.tokens);
     }
     
     Buffer_Type *buffer = &file->state.buffer;
@@ -1136,25 +1136,25 @@ Job_Callback_Sig(job_full_lex){
 
     char *tb = (char*)memory->data;
     
-    Cpp_Token_Stack tokens;
+    Cpp_Token_Array tokens;
     tokens.tokens = (Cpp_Token*)((char*)memory->data + buffer_size);
     tokens.max_count = (memory->size - buffer_size) / sizeof(Cpp_Token);
     tokens.count = 0;
     
     b32 still_lexing = 1;
     
-    Lex_Data lex = lex_data_init(tb);
+    Cpp_Lex_Data lex = cpp_lex_data_init(tb);
     
     do{
         i32 result =
-            cpp_lex_size_nonalloc(&lex,
-                                  text_data, text_size, text_size,
-                                  &tokens, 2048);
+            cpp_lex_nonalloc(&lex,
+                             text_data, text_size, text_size,
+                             &tokens, 2048);
         
         switch (result){
-            case LexNeedChunk: Assert(!"Invalid Path"); break;
+            case LexResult_NeedChunk: Assert(!"Invalid Path"); break;
             
-            case LexNeedTokenMemory:
+            case LexResult_NeedTokenMemory:
             if (system->check_cancel(thread)){
                 return;
             }
@@ -1164,13 +1164,13 @@ Job_Callback_Sig(job_full_lex){
             tokens.max_count = (memory->size - buffer_size) / sizeof(Cpp_Token);
             break;
             
-            case LexHitTokenLimit:
+            case LexResult_HitTokenLimit:
             if (system->check_cancel(thread)){
                 return;
             }
             break;
             
-            case LexFinished: still_lexing = 0; break;
+            case LexResult_Finished: still_lexing = 0; break;
         }
     } while (still_lexing);
@@ -1178,27 +1178,27 @@ Job_Callback_Sig(job_full_lex){
 
     system->acquire_lock(FRAME_LOCK);
     {
-        Assert(file->state.swap_stack.tokens == 0);
-        file->state.swap_stack.tokens = (Cpp_Token*)
+        Assert(file->state.swap_array.tokens == 0);
+        file->state.swap_array.tokens = (Cpp_Token*)
             general_memory_allocate(general, new_max*sizeof(Cpp_Token));
     }
     system->release_lock(FRAME_LOCK);
     
-    u8 *dest = (u8*)file->state.swap_stack.tokens;
+    u8 *dest = (u8*)file->state.swap_array.tokens;
     u8 *src = (u8*)tokens.tokens;
     
     memcpy(dest, src, tokens.count*sizeof(Cpp_Token));
     
     system->acquire_lock(FRAME_LOCK);
     {
-        Cpp_Token_Stack *file_stack = &file->state.token_stack;
-        file_stack->count = tokens.count;
-        file_stack->max_count = new_max;
-        if (file_stack->tokens){
-            general_memory_free(general, file_stack->tokens);
+        Cpp_Token_Array *file_token_array = &file->state.token_array;
+        file_token_array->count = tokens.count;
+        file_token_array->max_count = new_max;
+        if (file_token_array->tokens){
+            general_memory_free(general, file_token_array->tokens);
         }
-        file_stack->tokens = file->state.swap_stack.tokens;
-        file->state.swap_stack.tokens = 0;
+        file_token_array->tokens = file->state.swap_array.tokens;
+        file->state.swap_array.tokens = 0;
     }
     system->release_lock(FRAME_LOCK);
@@ -1216,16 +1216,16 @@ file_kill_tokens(System_Functions *system,
     file->settings.tokens_exist = 0;
     if (file->state.still_lexing){
         system->cancel_job(BACKGROUND_THREADS, file->state.lex_job);
-        if (file->state.swap_stack.tokens){
-            general_memory_free(general, file->state.swap_stack.tokens);
-            file->state.swap_stack.tokens = 0;
+        if (file->state.swap_array.tokens){
+            general_memory_free(general, file->state.swap_array.tokens);
+            file->state.swap_array.tokens = 0;
         }
     }
-    if (file->state.token_stack.tokens){
-        general_memory_free(general, file->state.token_stack.tokens);
+    if (file->state.token_array.tokens){
+        general_memory_free(general, file->state.token_array.tokens);
     }
     file->state.tokens_complete = 0;
-    file->state.token_stack = null_cpp_token_stack;
+    file->state.token_array = null_cpp_token_array;
 }
 
 #if BUFFER_EXPERIMENT_SCALPEL <= 0
@@ -1235,7 +1235,7 @@ file_first_lex_parallel(System_Functions *system,
     file->settings.tokens_exist = 1;
     
     if (file->is_loading == 0 && file->state.still_lexing == 0){
-        Assert(file->state.token_stack.tokens == 0);
+        Assert(file->state.token_array.tokens == 0);
         
         file->state.tokens_complete = 0;
         file->state.still_lexing = 1;
@@ -1256,7 +1256,7 @@ file_relex_parallel(System_Functions *system,
     General_Memory *general = &mem->general;
     Partition *part = &mem->part;
     
-    if (file->state.token_stack.tokens == 0){
+    if (file->state.token_array.tokens == 0){
         file_first_lex_parallel(system, general, file);
         return(false);
     }
@@ -1267,15 +1267,15 @@ file_relex_parallel(System_Functions *system,
         char *data = file->state.buffer.data;
         i32 size = file->state.buffer.size;
         
-        Cpp_Token_Stack *stack = &file->state.token_stack;
+        Cpp_Token_Array *array = &file->state.token_array;
         
         Cpp_Relex_State state =
-            cpp_relex_nonalloc_start(data, size, stack,
+            cpp_relex_nonalloc_start(data, size, array,
                                      start_i, end_i, amount, 100);
         
         Temp_Memory temp = begin_temp_memory(part);
         i32 relex_end;
-        Cpp_Token_Stack relex_space;
+        Cpp_Token_Array relex_space;
         relex_space.count = 0;
         relex_space.max_count = state.space_request;
         relex_space.tokens = push_array(part, Cpp_Token, relex_space.max_count);
@@ -1289,27 +1289,27 @@ file_relex_parallel(System_Functions *system,
             i32 shift_amount = relex_space.count - delete_amount;
             
             if (shift_amount != 0){
-                i32 new_count = stack->count + shift_amount;
-                if (new_count > stack->max_count){
+                i32 new_count = array->count + shift_amount;
+                if (new_count > array->max_count){
                     i32 new_max = LargeRoundUp(new_count, Kbytes(1));
-                    stack->tokens = (Cpp_Token*)
-                        general_memory_reallocate(general, stack->tokens,
-                                                  stack->count*sizeof(Cpp_Token),
+                    array->tokens = (Cpp_Token*)
+                        general_memory_reallocate(general, array->tokens,
+                                                  array->count*sizeof(Cpp_Token),
                                                   new_max*sizeof(Cpp_Token));
-                    stack->max_count = new_max;
+                    array->max_count = new_max;
                 }
                 
-                i32 shift_size = stack->count - relex_end;
+                i32 shift_size = array->count - relex_end;
                 if (shift_size > 0){
-                    Cpp_Token *old_base = stack->tokens + relex_end;
+                    Cpp_Token *old_base = array->tokens + relex_end;
                     memmove(old_base + shift_amount, old_base,
                             sizeof(Cpp_Token)*shift_size);
                 }
                 
-                stack->count += shift_amount;
+                array->count += shift_amount;
             }
             
-            memcpy(state.stack->tokens + state.start_token_i, relex_space.tokens,
+            memcpy(state.array->tokens + state.start_token_i, relex_space.tokens,
                    sizeof(Cpp_Token)*relex_space.count);
         }
@@ -1317,17 +1317,21 @@ file_relex_parallel(System_Functions *system,
     }
     
     if (!inline_lex){
-        Cpp_Token_Stack *stack = &file->state.token_stack;
-        Cpp_Get_Token_Result get_token_result = cpp_get_token(stack, end_i);
+        Cpp_Token_Array *array = &file->state.token_array;
+        Cpp_Get_Token_Result get_token_result = cpp_get_token(array, end_i);
         i32 end_token_i = get_token_result.token_index;
         
-        if (end_token_i < 0) end_token_i = 0;
-        else if (end_i > stack->tokens[end_token_i].start) ++end_token_i;
+        if (end_token_i < 0){
+            end_token_i = 0;
+        }
+        else if (end_i > array->tokens[end_token_i].start){
+            ++end_token_i;
+        }
         
-        cpp_shift_token_starts(stack, end_token_i, amount);
+        cpp_shift_token_starts(array, end_token_i, amount);
         --end_token_i;
         if (end_token_i >= 0){
-            Cpp_Token *token = stack->tokens + end_token_i;
+            Cpp_Token *token = array->tokens + end_token_i;
             if (token->start < end_i && token->start + token->size > end_i){
                 token->size += amount;
             }
@@ -1914,9 +1918,9 @@ file_pre_edit_maintenance(System_Functions *system,
                           Editing_File *file){
     if (file->state.still_lexing){
         system->cancel_job(BACKGROUND_THREADS, file->state.lex_job);
-        if (file->state.swap_stack.tokens){
-            general_memory_free(general, file->state.swap_stack.tokens);
-            file->state.swap_stack.tokens = 0;
+        if (file->state.swap_array.tokens){
+            general_memory_free(general, file->state.swap_array.tokens);
+            file->state.swap_array.tokens = 0;
         }
         file->state.still_lexing = 0;
     }
@@ -2175,7 +2179,7 @@ file_do_batch_edit(System_Functions *system, Models *models, Editing_File *file,
         case BatchEdit_PreserveTokens:
         {
             if (file->state.tokens_complete){
-                Cpp_Token_Stack tokens = file->state.token_stack;
+                Cpp_Token_Array tokens = file->state.token_array;
                 Cpp_Token *token = tokens.tokens;
                 Cpp_Token *end_token = tokens.tokens + tokens.count;
                 Cpp_Token original = {(Cpp_Token_Type)0};
@ -2531,21 +2535,21 @@ struct Make_Batch_Result{
|
||||||
};
|
};
|
||||||
|
|
||||||
internal Cpp_Token*
|
internal Cpp_Token*
|
||||||
get_first_token_at_line(Buffer *buffer, Cpp_Token_Stack tokens, i32 line){
|
get_first_token_at_line(Buffer *buffer, Cpp_Token_Array tokens, i32 line){
|
||||||
Cpp_Token *result = 0;
|
i32 start_pos = buffer->line_starts[line];
|
||||||
i32 start_pos = 0;
|
Cpp_Get_Token_Result get_token = cpp_get_token(&tokens, start_pos);
|
||||||
Cpp_Get_Token_Result get_token = {0};
|
|
||||||
|
|
||||||
start_pos = buffer->line_starts[line];
|
if (get_token.in_whitespace){
|
||||||
get_token = cpp_get_token(&tokens, start_pos);
|
get_token.token_index += 1;
|
||||||
if (get_token.in_whitespace) get_token.token_index += 1;
|
}
|
||||||
result = tokens.tokens + get_token.token_index;
|
|
||||||
|
Cpp_Token *result = tokens.tokens + get_token.token_index;
|
||||||
|
|
||||||
return(result);
|
return(result);
|
||||||
}
|
}
|
||||||
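
The rewritten get_first_token_at_line does the whole lookup through cpp_get_token: take the line's start position, binary search for it, and step forward one token when the position lands in whitespace (the in_whitespace flag meaning the position falls after the returned token, which is how all the call sites in this commit treat it). A hedged sketch of the same pattern in isolation:

    // Sketch: first token at or after byte position pos.
    // Assumes in_whitespace means pos sits between token_index and the next token.
    Cpp_Get_Token_Result get_token = cpp_get_token(&tokens, pos);
    int32_t token_i = get_token.token_index;
    if (get_token.in_whitespace){
        token_i += 1; // step to the first token beginning at or after pos
    }
    Cpp_Token *first = tokens.tokens + token_i; // unchecked: pos past the last token walks off the array
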
 
 internal Cpp_Token*
-seek_matching_token_backwards(Cpp_Token_Stack tokens, Cpp_Token *token,
+seek_matching_token_backwards(Cpp_Token_Array tokens, Cpp_Token *token,
                               Cpp_Token_Type open_type, Cpp_Token_Type close_type){
     int32_t nesting_level = 0;
     if (token <= tokens.tokens){
 
@@ -2667,7 +2671,7 @@ compute_this_indent(Buffer *buffer, Indent_Parse_State indent,
 }
 
 internal i32*
-get_line_indentation_marks(Partition *part, Buffer *buffer, Cpp_Token_Stack tokens,
+get_line_indentation_marks(Partition *part, Buffer *buffer, Cpp_Token_Array tokens,
                            i32 line_start, i32 line_end, i32 tab_width){
 
     i32 indent_mark_count = line_end - line_start;
 
@@ -2935,7 +2939,7 @@ file_auto_tab_tokens(System_Functions *system, Models *models,
     Buffer *buffer = &file->state.buffer;
 
     Assert(file && !file->is_dummy);
-    Cpp_Token_Stack tokens = file->state.token_stack;
+    Cpp_Token_Array tokens = file->state.token_array;
     Assert(tokens.tokens);
 
     i32 line_start = buffer_get_line_index(buffer, start);
 
@@ -3138,7 +3142,7 @@ init_normal_file(System_Functions *system, Models *models, Editing_File *file,
     String val = make_string(buffer, size);
     file_create_from_string(system, models, file, val);
 
-    if (file->settings.tokens_exist && file->state.token_stack.tokens == 0){
+    if (file->settings.tokens_exist && file->state.token_array.tokens == 0){
         file_first_lex_parallel(system, general, file);
     }
 
@@ -3156,7 +3160,7 @@ init_read_only_file(System_Functions *system, Models *models, Editing_File *file
     String val = null_string;
     file_create_from_string(system, models, file, val, 1);
 
-    if (file->settings.tokens_exist && file->state.token_stack.tokens == 0){
+    if (file->settings.tokens_exist && file->state.token_array.tokens == 0){
         file_first_lex_parallel(system, general, file);
     }
 
@@ -3467,7 +3471,7 @@ update_highlighting(View *view){
     }
 
     else if (file->state.tokens_complete){
-        Cpp_Token_Stack *tokens = &file->state.token_stack;
+        Cpp_Token_Stack *tokens = &file->state.token_array;
         Cpp_Get_Token_Result result = cpp_get_token(tokens, pos);
         Cpp_Token token = tokens->tokens[result.token_index];
         if (!result.in_whitespace){
 
@@ -5299,10 +5303,10 @@ draw_file_loaded(View *view, i32_Rect rect, b32 is_active, Render_Target *target
     Assert(view->edit_pos);
 
     b32 tokens_use = 0;
-    Cpp_Token_Stack token_stack = {};
+    Cpp_Token_Array token_array = {};
     if (file){
-        tokens_use = file->state.tokens_complete && (file->state.token_stack.count > 0);
-        token_stack = file->state.token_stack;
+        tokens_use = file->state.tokens_complete && (file->state.token_array.count > 0);
+        token_array = file->state.token_array;
     }
 
     Partition *part = &models->mem.part;
 
@@ -5372,8 +5376,8 @@ draw_file_loaded(View *view, i32_Rect rect, b32 is_active, Render_Target *target
     u32 main_color = style->main.default_color;
     u32 special_color = style->main.special_character_color;
     if (tokens_use){
-        Cpp_Get_Token_Result result = cpp_get_token(&token_stack, items->index);
-        main_color = *style_get_color(style, token_stack.tokens[result.token_index]);
+        Cpp_Get_Token_Result result = cpp_get_token(&token_array, items->index);
+        main_color = *style_get_color(style, token_array.tokens[result.token_index]);
         token_i = result.token_index + 1;
     }
 
@@ -5388,13 +5392,13 @@ draw_file_loaded(View *view, i32_Rect rect, b32 is_active, Render_Target *target
     i32 ind = item->index;
     highlight_this_color = 0;
     if (tokens_use && ind != prev_ind){
-        Cpp_Token current_token = token_stack.tokens[token_i-1];
+        Cpp_Token current_token = token_array.tokens[token_i-1];
 
-        if (token_i < token_stack.count){
-            if (ind >= token_stack.tokens[token_i].start){
+        if (token_i < token_array.count){
+            if (ind >= token_array.tokens[token_i].start){
                 main_color =
-                    *style_get_color(style, token_stack.tokens[token_i]);
-                current_token = token_stack.tokens[token_i];
+                    *style_get_color(style, token_array.tokens[token_i]);
+                current_token = token_array.tokens[token_i];
                 ++token_i;
             }
             else if (ind >= current_token.start + current_token.size){
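
The draw_file_loaded hunks show the intended usage pattern for the renamed Cpp_Token_Array: one cpp_get_token binary search to seed token_i at the first visible character, then a linear walk that bumps token_i whenever the character index crosses the next token's start. Reduced to its skeleton (style lookup and rendering stripped out; the loop bounds are placeholders):

    // Sketch: seed once with a binary search, then advance linearly while drawing.
    Cpp_Get_Token_Result result = cpp_get_token(&token_array, first_index);
    int32_t token_i = result.token_index + 1;
    for (int32_t ind = first_index; ind < one_past_last_index; ++ind){
        if (token_i < token_array.count &&
            ind >= token_array.tokens[token_i].start){
            // crossed into the next token: look up its color here, then advance
            ++token_i;
        }
    }
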
312
4ed_metagen.cpp

@@ -410,7 +410,7 @@ typedef struct Item_Set{
 
 typedef struct Parse{
     String code;
-    Cpp_Token_Stack tokens;
+    Cpp_Token_Array tokens;
     int32_t item_count;
 } Parse;
 
@@ -440,10 +440,10 @@ get_lexeme(Cpp_Token token, char *code){
 }
 
 static Parse_Context
-setup_parse_context(char *data, Cpp_Token_Stack stack){
+setup_parse_context(char *data, Cpp_Token_Array array){
     Parse_Context context;
-    context.token_s = stack.tokens;
-    context.token_e = stack.tokens + stack.count;
+    context.token_s = array.tokens;
+    context.token_e = array.tokens + array.count;
     context.token = context.token_s;
     context.data = data;
     return(context);
 
@@ -536,7 +536,7 @@ static Parse
 meta_lex(char *filename){
     Parse result = {0};
     result.code = file_dump(filename);
-    result.tokens = cpp_make_token_stack(1024);
+    result.tokens = cpp_make_token_array(1024);
     cpp_lex_file(result.code.str, result.code.size, &result.tokens);
     return(result);
 }
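
meta_lex is the generator's whole front end: dump the file, lex it with the 4cpp lexer into the renamed Cpp_Token_Array, and hand the pair to the parser. Roughly how the pieces above compose (the header name here is a made-up example):

    Parse parse = meta_lex("some_api_header.h");   // file_dump + cpp_lex_file
    Parse_Context context = setup_parse_context(parse.code.str, parse.tokens);
    // context.token now walks parse.tokens.tokens .. parse.tokens.tokens + parse.tokens.count
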
@@ -584,9 +584,19 @@ typedef enum Doc_Note_Type{
     DOC_PARAM,
     DOC_RETURN,
     DOC,
-    DOC_SEE
+    DOC_SEE,
+    DOC_HIDE
 } Doc_Note_Type;
 
+static String
+doc_note_string[] = {
+    make_lit_string("DOC_PARAM"),
+    make_lit_string("DOC_RETURN"),
+    make_lit_string("DOC"),
+    make_lit_string("DOC_SEE"),
+    make_lit_string("DOC_HIDE"),
+};
+
 static int32_t
 check_and_fix_docs(String *doc_string){
     int32_t result = false;
 
@@ -628,14 +638,6 @@ get_doc_string_from_prev(Parse_Context *context, String *doc_string){
     return(result);
 }
 
-static String
-doc_note_string[] = {
-    make_lit_string("DOC_PARAM"),
-    make_lit_string("DOC_RETURN"),
-    make_lit_string("DOC"),
-    make_lit_string("DOC_SEE"),
-};
-
 static String
 doc_parse_note(String source, int32_t *pos){
     String result = {0};
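
Moving doc_note_string up next to Doc_Note_Type makes the invariant visible: the string table and the enum must stay index-aligned, and both grow by DOC_HIDE here. The notes themselves live in a comment between a function's signature and its body, in the shape the later hunks show for Is_Fullscreen; a hypothetical annotated function would look like:

    FCPP_LINK int32_t
    example_call(int32_t x)/*
    DOC_PARAM(x, the value to transform.)
    DOC_RETURN(The transformed value.)
    DOC(A hypothetical function used only to show the note shapes; presumably
    DOC_HIDE marks an item the generator should leave out of the docs.)
    */{
        return(x + 1);
    }
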
@@ -1781,6 +1783,7 @@ print_macro_html(String *out, String name, Argument_Breakdown breakdown){
 #define BACK_COLOR "#FAFAFA"
 #define TEXT_COLOR "#0D0D0D"
 #define CODE_BACK "#DFDFDF"
+#define EXAMPLE_BACK "#EFEFDF"
 
 #define POP_COLOR_1 "#309030"
 #define POP_BACK_1 "#E0FFD0"
 
@@ -1790,9 +1793,12 @@ print_macro_html(String *out, String name, Argument_Breakdown breakdown){
 
 #define CODE_STYLE "font-family: \"Courier New\", Courier, monospace; text-align: left;"
 
-#define DESCRIPT_SECTION_STYLE \
+#define CODE_BLOCK_STYLE(back) \
 "margin-top: 3mm; margin-bottom: 3mm; font-size: .95em; " \
-"background: "CODE_BACK"; padding: 0.25em;"
+"background: "back"; padding: 0.25em;"
+
+#define DESCRIPT_SECTION_STYLE CODE_BLOCK_STYLE(CODE_BACK)
+#define EXAMPLE_CODE_STYLE CODE_BLOCK_STYLE(EXAMPLE_BACK)
 
 #define DOC_HEAD_OPEN "<div style='margin-top: 3mm; margin-bottom: 3mm; color: "POP_COLOR_1";'><b><i>"
 #define DOC_HEAD_CLOSE "</i></b></div>"
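
CODE_BLOCK_STYLE factors the one real difference, the background color, out of the block styles, so the two derived macros expand to identical CSS apart from the back argument. Written out by hand:

    // DESCRIPT_SECTION_STYLE == CODE_BLOCK_STYLE(CODE_BACK) expands to:
    //   "margin-top: 3mm; margin-bottom: 3mm; font-size: .95em; background: #DFDFDF; padding: 0.25em;"
    // EXAMPLE_CODE_STYLE == CODE_BLOCK_STYLE(EXAMPLE_BACK) expands to:
    //   "margin-top: 3mm; margin-bottom: 3mm; font-size: .95em; background: #EFEFDF; padding: 0.25em;"
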
@@ -1808,6 +1814,227 @@ print_macro_html(String *out, String name, Argument_Breakdown breakdown){
 #define DOC_ITEM_OPEN "<div style='margin-left: 5mm; margin-right: 5mm;'>"
 #define DOC_ITEM_CLOSE "</div>"
 
+#define EXAMPLE_CODE_OPEN "<div style='"CODE_STYLE EXAMPLE_CODE_STYLE"'>"
+#define EXAMPLE_CODE_CLOSE "</div>"
+
+static String
+get_first_double_line(String source){
+    String line = {0};
+    int32_t pos0 = find_substr_s(source, 0, make_lit_string("\n\n"));
+    int32_t pos1 = find_substr_s(source, 0, make_lit_string("\r\n\r\n"));
+    if (pos1 < pos0){
+        pos0 = pos1;
+    }
+    line = substr(source, 0, pos0);
+    return(line);
+}
+
+static String
+get_next_double_line(String source, String line){
+    String next = {0};
+    int32_t pos = (int32_t)(line.str - source.str) + line.size;
+    int32_t start = 0, pos0 = 0, pos1 = 0;
+    
+    if (pos < source.size){
+        assert(source.str[pos] == '\n' || source.str[pos] == '\r');
+        start = pos + 1;
+        
+        if (start < source.size){
+            pos0 = find_substr_s(source, start, make_lit_string("\n\n"));
+            pos1 = find_substr_s(source, start, make_lit_string("\r\n\r\n"));
+            if (pos1 < pos0){
+                pos0 = pos1;
+            }
+            next = substr(source, start, pos0 - start);
+        }
+    }
+    
+    return(next);
+}
+
+static String
+get_next_word(String source, String prev_word){
+    String word = {0};
+    int32_t pos0 = (int32_t)(prev_word.str - source.str) + prev_word.size;
+    int32_t pos1 = 0;
+    char c = 0;
+    
+    for (; pos0 < source.size; ++pos0){
+        c = source.str[pos0];
+        if (!(char_is_whitespace(c) || c == '(' || c == ')')){
+            break;
+        }
+    }
+    
+    if (pos0 < source.size){
+        for (pos1 = pos0; pos1 < source.size; ++pos1){
+            c = source.str[pos1];
+            if (char_is_whitespace(c) || c == '(' || c == ')'){
+                break;
+            }
+        }
+        
+        word = substr(source, pos0, pos1 - pos0);
+    }
+    
+    return(word);
+}
+
+static String
+get_first_word(String source){
+    String start_str = make_string(source.str, 0);
+    String word = get_next_word(source, start_str);
+    return(word);
+}
+
+enum Doc_Chunk_Type{
+    DocChunk_PlainText,
+    DocChunk_CodeExample,
+    
+    DocChunk_Count
+};
+
+static String doc_chunk_headers[] = {
+    make_lit_string(""),
+    make_lit_string("CODE_EXAMPLE"),
+};
+
+static String
+get_next_doc_chunk(String source, String prev_chunk, Doc_Chunk_Type *type){
+    String chunk = {0};
+    String word = {0};
+    int32_t pos = source.size;
+    int32_t word_index = 0;
+    Doc_Chunk_Type t = DocChunk_PlainText;
+    
+    int32_t start_pos = (int32_t)(prev_chunk.str - source.str) + prev_chunk.size;
+    String source_tail = substr_tail(source, start_pos);
+    
+    Assert(DocChunk_Count == ArrayCount(doc_chunk_headers));
+    
+    for (word = get_first_word(source_tail);
+         word.str;
+         word = get_next_word(source_tail, word), ++word_index){
+        
+        for (int32_t i = 1; i < DocChunk_Count; ++i){
+            if (match_ss(word, doc_chunk_headers[i])){
+                pos = (int32_t)(word.str - source.str);
+                t = (Doc_Chunk_Type)i;
+                goto doublebreak;
+            }
+        }
+    }
+    doublebreak:;
+    
+    *type = DocChunk_PlainText;
+    if (word_index == 0){
+        *type = t;
+        
+        int32_t nest_level = 1;
+        int32_t i = find_s_char(source, pos, '(');
+        for (++i; i < source.size; ++i){
+            if (source.str[i] == '('){
+                ++nest_level;
+            }
+            else if (source.str[i] == ')'){
+                --nest_level;
+                if (nest_level == 0){
+                    break;
+                }
+            }
+        }
+        
+        pos = i+1;
+    }
+    
+    chunk = substr(source, start_pos, pos - start_pos);
+    
+    int32_t is_all_white = 1;
+    for (int32_t i = 0; i < chunk.size; ++i){
+        if (!char_is_whitespace(chunk.str[i])){
+            is_all_white = 0;
+            break;
+        }
+    }
+    
+    if (is_all_white){
+        chunk = null_string;
+    }
+    
+    return(chunk);
+}
+
+static String
+get_first_doc_chunk(String source, Doc_Chunk_Type *type){
+    String start_str = make_string(source.str, 0);
+    String chunk = get_next_doc_chunk(source, start_str, type);
+    return(chunk);
+}
+
+
+static void
+print_doc_description(String *out, Partition *part, String src){
+    Doc_Chunk_Type type;
+    
+    for (String chunk = get_first_doc_chunk(src, &type);
+         chunk.str;
+         chunk = get_next_doc_chunk(src, chunk, &type)){
+        
+        switch (type){
+            case DocChunk_PlainText:
+            {
+                for (String line = get_first_double_line(chunk);
+                     line.str;
+                     line = get_next_double_line(chunk, line)){
+                    append_ss(out, line);
+                    append_sc(out, "<br><br>");
+                }
+            }break;
+            
+            case DocChunk_CodeExample:
+            {
+                int32_t start = 0;
+                int32_t end = chunk.size-1;
+                while (start < end && chunk.str[start] != '(') ++start;
+                start += 1;
+                while (end > start && chunk.str[end] != ')') --end;
+                
+                
+                append_sc(out, EXAMPLE_CODE_OPEN);
+                
+                if (start < end){
+                    String code_example = substr(chunk, start, end - start);
+                    int32_t first_line = 1;
+                    
+                    for (String line = get_first_line(code_example);
+                         line.str;
+                         line = get_next_line(code_example, line)){
+                        
+                        if (!(first_line && line.size == 0)){
+                            int32_t space_i = 0;
+                            for (; space_i < line.size; ++space_i){
+                                if (line.str[space_i] == ' '){
+                                    append_sc(out, "&nbsp;");
+                                }
+                                else{
+                                    break;
+                                }
+                            }
+                            
+                            String line_tail = substr_tail(line, space_i);
+                            append_ss(out, line_tail);
+                            append_sc(out, "<br>");
+                        }
+                        first_line = 0;
+                    }
+                }
+                
+                append_sc(out, EXAMPLE_CODE_CLOSE);
+            }break;
+        }
+    }
+}
+
 static void
 print_struct_docs(String *out, Partition *part, Item_Node *member){
     for (Item_Node *member_iter = member->first_child;
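
With the chunk scanner in place, a DOC(...) body is no longer copied into the page verbatim: get_next_doc_chunk cuts the text at each CODE_EXAMPLE word, plain chunks are split on blank lines into <br><br>-separated paragraphs, and each CODE_EXAMPLE(...) payload is re-emitted line by line with leading spaces turned into &nbsp; inside the EXAMPLE_CODE_OPEN wrapper. A hypothetical doc string exercising both paths:

    // Input (inside some DOC(...) note):
    //   Counts the widgets.
    //
    //   CODE_EXAMPLE(
    //       int32_t n = count_widgets();
    //   )
    //
    // Output, roughly:
    //   Counts the widgets.<br><br>
    //   <div style='...'>&nbsp;&nbsp;&nbsp;&nbsp;int32_t n = count_widgets();<br></div>
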
@@ -1829,7 +2056,8 @@ print_struct_docs(String *out, Partition *part, Item_Node *member){
         append_sc(out, DOC_ITEM_HEAD_INL_CLOSE"</div>");
 
         append_sc(out, "<div style='margin-bottom: 6mm;'>"DOC_ITEM_OPEN);
-        append_ss(out, doc.main_doc);
+        // TODO(allen): append_ss(out, doc.main_doc);
+        print_doc_description(out, part, doc.main_doc);
         append_sc(out, DOC_ITEM_CLOSE"</div>");
 
         append_sc(out, "</div>");
 
@@ -1854,30 +2082,6 @@ print_see_also(String *out, Documentation *doc){
     }
 }
 
-static void
-print_see_also(FILE *file, Documentation *doc){
-    int32_t doc_see_count = doc->see_also_count;
-    if (doc_see_count > 0){
-        fprintf(file, DOC_HEAD_OPEN"See Also"DOC_HEAD_CLOSE);
-        
-        for (int32_t j = 0; j < doc_see_count; ++j){
-            String see_also = doc->see_also[j];
-            fprintf(file,
-                    DOC_ITEM_OPEN"<a href='#%.*s_doc'>%.*s</a>"DOC_ITEM_CLOSE,
-                    see_also.size, see_also.str,
-                    see_also.size, see_also.str
-                    );
-        }
-    }
-}
-
-static void
-print_str(FILE *file, String str){
-    if (str.size > 0){
-        fprintf(file, "%.*s", str.size, str.str);
-    }
-}
-
 static void
 print_function_body_code(String *out, Parse_Context *context, int32_t start){
     String pstr = {0}, lexeme = {0};
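
This deletion finishes the move away from streaming the docs straight to a FILE*: see-also lists and raw strings now go through the String versions, accumulating into *out with append_ss and append_sc so the page is built in memory first. The deleted print_str has a one-line equivalent in the surviving style:

    // Old: fprintf(file, "%.*s", str.size, str.str);
    // New-style equivalent, as used throughout the hunks above:
    append_ss(out, str);
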
@@ -1968,7 +2172,8 @@ print_function_docs(String *out, Partition *part, String name, String doc_string
     String main_doc = doc.main_doc;
     if (main_doc.size != 0){
         append_sc(out, DOC_HEAD_OPEN"Description"DOC_HEAD_CLOSE DOC_ITEM_OPEN);
-        append_ss(out, main_doc);
+        // TODO(allen): append_ss(out, main_doc);
+        print_doc_description(out, part, main_doc);
         append_sc(out, DOC_ITEM_CLOSE);
     }
 
@@ -2076,9 +2281,13 @@ print_item(String *out, Partition *part, Used_Links *used,
         append_sc(out, DOC_HEAD_OPEN"Description"DOC_HEAD_CLOSE);
 
         append_sc(out, DOC_ITEM_OPEN);
-        append_ss(out, main_doc);
+        // TODO(allen): append_ss(out, main_doc);
+        print_doc_description(out, part, main_doc);
         append_sc(out, DOC_ITEM_CLOSE);
     }
+    else{
+        fprintf(stderr, "warning: no documentation string for %.*s\n", name.size, name.str);
+    }
 
     print_see_also(out, &doc);
 
@@ -2104,9 +2313,13 @@ print_item(String *out, Partition *part, Used_Links *used,
         append_sc(out, DOC_HEAD_OPEN"Description"DOC_HEAD_CLOSE);
 
         append_sc(out, DOC_ITEM_OPEN);
-        append_ss(out, main_doc);
+        // TODO(allen): append_ss(out, main_doc);
+        print_doc_description(out, part, main_doc);
         append_sc(out, DOC_ITEM_CLOSE);
     }
+    else{
+        fprintf(stderr, "warning: no documentation string for %.*s\n", name.size, name.str);
+    }
 
     if (item->first_child){
         append_sc(out, DOC_HEAD_OPEN"Values"DOC_HEAD_CLOSE);
 
@@ -2132,7 +2345,8 @@ print_item(String *out, Partition *part, Used_Links *used,
         append_sc(out, "</span></div>");
 
         append_sc(out, "<div style='margin-bottom: 6mm;'>"DOC_ITEM_OPEN);
-        append_ss(out, doc.main_doc);
+        // TODO(allen): append_ss(out, doc.main_doc);
+        print_doc_description(out, part, doc.main_doc);
         append_sc(out, DOC_ITEM_CLOSE"</div>");
 
         append_sc(out, "</div>");
 
@@ -2164,9 +2378,13 @@ print_item(String *out, Partition *part, Used_Links *used,
         append_sc(out, DOC_HEAD_OPEN"Description"DOC_HEAD_CLOSE);
 
         append_sc(out, DOC_ITEM_OPEN);
-        append_ss(out, main_doc);
+        // TODO(allen): append_ss(out, main_doc);
+        print_doc_description(out, part, main_doc);
         append_sc(out, DOC_ITEM_CLOSE);
     }
+    else{
+        fprintf(stderr, "warning: no documentation string for %.*s\n", name.size, name.str);
+    }
 
     if (member->first_child){
         append_sc(out, DOC_HEAD_OPEN"Fields"DOC_HEAD_CLOSE);
 
7
build.c

@@ -713,12 +713,9 @@ build_main(char *cdir, uint32_t flags){
 static void
 standard_build(char *cdir, uint32_t flags){
     fsm_generator(cdir);
     
     metagen(cdir);
-    //do_buildsuper(cdir);
-    
-    //build_main(cdir, flags);
+    do_buildsuper(cdir);
+    build_main(cdir, flags);
 }
 
 #define PACK_DIR "../distributions"
@@ -15,7 +15,6 @@
 #define ArrayCount(a) (sizeof(a)/sizeof(*a))
 
 #include "4cpp_lexer_types.h"
-#include "4cpp_lexer_fsms.h"
 #include "4ed_mem_ansi.c"
 
 typedef struct Whitespace_FSM{
 
@@ -34,8 +33,8 @@ whitespace_skip_fsm(Whitespace_FSM wfsm, char c){
     return(wfsm);
 }
 
-Lex_FSM
-int_fsm(Lex_FSM fsm, char c){
+Cpp_Lex_FSM
+int_fsm(Cpp_Lex_FSM fsm, char c){
     switch (fsm.int_state){
         case LSINT_default:
         switch (c){
 
@@ -98,8 +97,8 @@ int_fsm(Lex_FSM fsm, char c){
     return(fsm);
 }
 
-Lex_FSM
-main_fsm(Lex_FSM fsm, unsigned char pp_state, unsigned char c){
+Cpp_Lex_FSM
+main_fsm(Cpp_Lex_FSM fsm, uint8_t pp_state, uint8_t c){
     if (c == 0){
         fsm.emit_token = 1;
     }
 
@@ -610,8 +609,8 @@ generate_int_table(){
     allocate_full_tables(&table, state_count);
 
     int32_t i = 0;
-    Lex_FSM fsm = {0};
-    Lex_FSM new_fsm;
+    Cpp_Lex_FSM fsm = {0};
+    Cpp_Lex_FSM new_fsm = {0};
     for (uint16_t c = 0; c < 256; ++c){
         for (uint8_t state = 0; state < state_count; ++state){
             fsm.int_state = state;
 
@@ -633,8 +632,8 @@ generate_fsm_table(uint8_t pp_state){
     allocate_full_tables(&table, state_count);
 
     int32_t i = 0;
-    Lex_FSM fsm = {0};
-    Lex_FSM new_fsm;
+    Cpp_Lex_FSM fsm = {0};
+    Cpp_Lex_FSM new_fsm = {0};
     for (uint16_t c = 0; c < 256; ++c){
         for (uint8_t state = 0; state < state_count; ++state){
             fsm.state = state;
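
generate_int_table and generate_fsm_table drive the hand-written FSM functions over every (input byte, state) pair and record the successor states, which is how the lexer's precomputed transition tables get produced. Schematically, with the table name and byte-major indexing inferred from the loop order rather than stated by the diff:

    // Sketch: flatten main_fsm into a 256 x state_count lookup table.
    // transition_table and its layout are assumptions for illustration.
    int32_t i = 0;
    Cpp_Lex_FSM fsm = {0};
    Cpp_Lex_FSM new_fsm = {0};
    for (uint16_t c = 0; c < 256; ++c){
        for (uint8_t state = 0; state < state_count; ++state){
            fsm.state = state;
            new_fsm = main_fsm(fsm, pp_state, (uint8_t)c);
            transition_table[i++] = new_fsm.state; // lookup: table[c*state_count + state]
        }
    }
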
@@ -213,7 +213,7 @@ Stream mode can be enabled with -S or -F flags on the command line to 4ed.)
 
 API_EXPORT bool32
 Is_Fullscreen(Application_Links *app)/*
-DOC_SEE(This call returns true if the 4coder is in full screen mode. This call
+DOC(This call returns true if the 4coder is in full screen mode. This call
 takes toggles that have already occured this frame into account. So it may return
 true even though the frame has not ended and actually put 4coder into full screen. If
 it returns true though, 4coder will definitely be full screen by the beginning of the next
 
@@ -229,7 +229,7 @@ frame if the state is not changed.)
 
 API_EXPORT void
 Send_Exit_Signal(Application_Links *app)/*
-DOC_SEE(This call sends a signal to 4coder to attempt to exit. If there are unsaved
+DOC(This call sends a signal to 4coder to attempt to exit. If there are unsaved
 files this triggers a dialogue ensuring you're okay with closing.)
 */{
     win32vars.send_exit_signal = 1;