
// Keyword/token type codes, offset past MAX_TOKEN_LIMIT so they cannot
// collide with the base token range. The expansions are parenthesized so
// the macros stay correct inside larger expressions (e.g. `TOKEN_GROUP * 2`
// or `x << TOKEN_END_GROUP` would mis-associate without the parentheses).
#define TOKEN_CONST_KW (15 + MAX_TOKEN_LIMIT)

#define TOKEN_GROUP (16 + MAX_TOKEN_LIMIT)
#define TOKEN_END_GROUP (17 + MAX_TOKEN_LIMIT)

// NOTE(review): `END` is a very collision-prone macro name; consider a
// TOKEN_ prefix in a follow-up (kept as-is here since callers use it).
#define END (18 + MAX_TOKEN_LIMIT)
@@ -139,7 +139,7 @@ token* lexer::read_token() {
139139 case 264645514 : // break
140140 return last_tok = new token (TOKEN_BREAK);
141141 case 271304754 :
142- return last_tok = new token (TOKEN_BEGIN_GROUP );
142+ return last_tok = new token (TOKEN_GROUP );
143143 case 303295209 :
144144 return last_tok = new token (TOKEN_END_GROUP);
145145 default : {
@@ -318,24 +318,24 @@ std::list<token*> lexer::tokenize(bool interactive_mode) {
318318token* lexer::tokenize_statement (bool interactive_mode) {
319319 switch (last_tok->type )
320320 {
321- case TOKEN_BEGIN_GROUP : {
321+ case TOKEN_GROUP : {
322322 delete last_tok;
323323 match_tok (read_token (), TOKEN_IDENTIFIER);
324324 identifier_token* id = (identifier_token*)last_tok;
325- id-> no_delete ();
326- lexer_state->group_stack . push_back (( char *)id-> get_identifier () );
327- delete id;
325+
326+ lexer_state->new_group (id );
327+
328328 read_token ();
329+
329330 return nullptr ;
330331 }
331- case TOKEN_END_GROUP:
332- if (lexer_state->group_stack .empty ()) {
333- throw ERROR_UNEXPECTED_TOKEN;
334- }
335- delete[] lexer_state->group_stack .back ();
336- lexer_state->group_stack .pop_back ();
332+ case TOKEN_END_GROUP:{
333+ lexer_state->pop_group ();
334+
337335 read_token ();
338- return nullptr ;
336+
337+ return nullptr ;
338+ }
339339 case TOKEN_CONST_KW: {
340340 delete last_tok;
341341 match_tok (read_token (), TOKEN_IDENTIFIER);
@@ -427,7 +427,7 @@ token* lexer::tokenize_statement(bool interactive_mode) {
427427 case TOKEN_STRUCT_KW: {
428428 delete last_tok;
429429 match_tok (read_token (), TOKEN_IDENTIFIER);
430- identifier_token* proto_id = apply_groups (( identifier_token*)last_tok) ;
430+ identifier_token* proto_id = ( identifier_token*)last_tok;
431431 match_tok (read_token (), TOKEN_OPEN_BRACE);
432432 delete last_tok;
433433 std::list<identifier_token*> properties;
@@ -440,12 +440,13 @@ token* lexer::tokenize_statement(bool interactive_mode) {
440440 throw ERROR_UNEXPECTED_END;
441441 delete last_tok;
442442 read_token ();
443+ lexer_state->declare_id (proto_id);
443444 return new structure_prototype (proto_id, properties);
444445 }
445446 case TOKEN_FUNC_KW: {
446447 delete last_tok;
447448 match_tok (read_token (), TOKEN_IDENTIFIER);
448- identifier_token* proto_id = apply_groups (( identifier_token*)last_tok) ;
449+ identifier_token* proto_id = ( identifier_token*)last_tok;
449450 match_tok (read_token (), TOKEN_OPEN_PARAM);
450451 delete last_tok;
451452 std::list<identifier_token*> params;
@@ -462,6 +463,7 @@ token* lexer::tokenize_statement(bool interactive_mode) {
462463 throw ERROR_UNEXPECTED_TOKEN;
463464 delete last_tok;
464465 read_token ();
466+ lexer_state->declare_id (proto_id);
465467 return new function_prototype (proto_id, params, tokenize_body ());
466468 }
467469 case TOKEN_STATIC_KW: {
@@ -532,34 +534,6 @@ variable_access_token* lexer::tokenize_var_access(identifier_token* identifier)
532534 }
533535 return new variable_access_token (toks);
534536}
535-
// Removed in this commit (its role is taken over by group::proc_id).
// Builds a group-qualified identifier: starting from `id`'s raw name it
// appends "@<group>" for each entry on lexer_state->group_stack, walking
// the stack back-to-front (rbegin/rend), then returns a freshly hashed
// identifier_token for the combined name. Consumes `id`.
536- identifier_token* lexer::apply_groups (identifier_token* id) {
// Steal the raw name buffer, then destroy the token without letting it
// free that buffer (no_delete) -- this function now owns base_id.
537- char * base_id = (char *)id->get_identifier ();
538- id->no_delete ();
539- delete id;
540-
// Accumulate the qualified name character by character.
541- std::list<char > chars;
542-
543- for (size_t i = 0 ; i < strlen (base_id); i++)
544- chars.push_back (base_id[i]);
545-
// Innermost group first: iterate the stack in reverse, separating each
// group name with '@'.
546- for (auto group_kw = lexer_state->group_stack .rbegin (); group_kw != lexer_state->group_stack .rend (); ++group_kw) {
547- chars.push_back (' @' );
548- for (size_t i = 0 ; i < strlen (*group_kw); i++)
549- chars.push_back ((*group_kw)[i]);
550- }
551-
// Flatten into a NUL-terminated heap buffer; presumably the new
// identifier_token takes ownership of id_buf -- TODO confirm.
552- char * id_buf = new char [chars.size () + 1 ];
553- unsigned int i = 0 ;
554- for (auto it = chars.begin (); it != chars.end (); ++it)
555- id_buf[i++] = (*it);
556- id_buf[i] = 0 ;
557-
558- identifier_token* new_id = new identifier_token (id_buf, insecure_hash (id_buf));
559-
560- return new_id;
561- }
562-
563537token* lexer::tokenize_value () {
564538 if (last_tok->type == TOKEN_IDENTIFIER) {
565539 identifier_token* identifier = (identifier_token*)last_tok;
@@ -582,6 +556,7 @@ token* lexer::tokenize_value() {
582556 match_tok (last_tok, TOKEN_CLOSE_PARAM);
583557 delete last_tok;
584558 read_token ();
559+ this ->lexer_state ->reference_id (identifier);
585560 return new function_call_token (identifier, arguments);
586561 }
587562 else
@@ -647,6 +622,7 @@ token* lexer::tokenize_value() {
647622 delete last_tok;
648623 match_tok (read_token (), TOKEN_IDENTIFIER);
649624 create_struct_token* new_struct = new create_struct_token ((identifier_token*)last_tok);
625+ this ->lexer_state ->reference_id (new_struct->identifier );
650626 read_token ();
651627 return new_struct;
652628 }
@@ -680,4 +656,29 @@ token* lexer::tokenize_expression(unsigned char min) {
680656 lhs = new binary_operator_token (lhs, rhs, op);
681657 }
682658 return lhs;
659+ }
660+
661+ void group::proc_id (identifier_token* id) {
662+ std::list<char > chars;
663+
664+ for (int i = 0 ; i < strlen (id->get_identifier ()); i++)
665+ chars.push_back (id->get_identifier ()[i]);
666+
667+ group* current = this ;
668+ while (current != nullptr )
669+ {
670+ chars.push_back (' @' );
671+ for (int i = 0 ; i < strlen (identifier->get_identifier ()); i++)
672+ chars.push_back (identifier->get_identifier ()[i]);
673+ current = current->parent ;
674+ }
675+
676+ char * new_buf = new char [chars.size () + 1 ];
677+ unsigned long in = 0 ;
678+ for (auto i = chars.begin (); i != chars.end (); ++i) {
679+ new_buf[in++] = *i;
680+ }
681+ new_buf[in] = 0 ;
682+
683+ id->set_c_str (new_buf);
683684}
0 commit comments