Skip to content

Commit fac5cf2

Browse files
Fixed issue #18
1 parent 3322356 commit fac5cf2

5 files changed

Lines changed: 122 additions & 55 deletions

File tree

src/lexer.cpp

Lines changed: 44 additions & 43 deletions
Original file line numberDiff line numberDiff line change
@@ -28,7 +28,7 @@
2828

2929
#define TOKEN_CONST_KW 15 + MAX_TOKEN_LIMIT
3030

31-
#define TOKEN_BEGIN_GROUP 16 + MAX_TOKEN_LIMIT
31+
#define TOKEN_GROUP 16 + MAX_TOKEN_LIMIT
3232
#define TOKEN_END_GROUP 17 + MAX_TOKEN_LIMIT
3333

3434
#define END 18 + MAX_TOKEN_LIMIT
@@ -139,7 +139,7 @@ token* lexer::read_token() {
139139
case 264645514: //break
140140
return last_tok = new token(TOKEN_BREAK);
141141
case 271304754:
142-
return last_tok = new token(TOKEN_BEGIN_GROUP);
142+
return last_tok = new token(TOKEN_GROUP);
143143
case 303295209:
144144
return last_tok = new token(TOKEN_END_GROUP);
145145
default: {
@@ -318,24 +318,24 @@ std::list<token*> lexer::tokenize(bool interactive_mode) {
318318
token* lexer::tokenize_statement(bool interactive_mode) {
319319
switch (last_tok->type)
320320
{
321-
case TOKEN_BEGIN_GROUP: {
321+
case TOKEN_GROUP: {
322322
delete last_tok;
323323
match_tok(read_token(), TOKEN_IDENTIFIER);
324324
identifier_token* id = (identifier_token*)last_tok;
325-
id->no_delete();
326-
lexer_state->group_stack.push_back((char*)id->get_identifier());
327-
delete id;
325+
326+
lexer_state->new_group(id);
327+
328328
read_token();
329+
329330
return nullptr;
330331
}
331-
case TOKEN_END_GROUP:
332-
if (lexer_state->group_stack.empty()) {
333-
throw ERROR_UNEXPECTED_TOKEN;
334-
}
335-
delete[] lexer_state->group_stack.back();
336-
lexer_state->group_stack.pop_back();
332+
case TOKEN_END_GROUP:{
333+
lexer_state->pop_group();
334+
337335
read_token();
338-
return nullptr;
336+
337+
return nullptr;
338+
}
339339
case TOKEN_CONST_KW: {
340340
delete last_tok;
341341
match_tok(read_token(), TOKEN_IDENTIFIER);
@@ -427,7 +427,7 @@ token* lexer::tokenize_statement(bool interactive_mode) {
427427
case TOKEN_STRUCT_KW: {
428428
delete last_tok;
429429
match_tok(read_token(), TOKEN_IDENTIFIER);
430-
identifier_token* proto_id = apply_groups((identifier_token*)last_tok);
430+
identifier_token* proto_id = (identifier_token*)last_tok;
431431
match_tok(read_token(), TOKEN_OPEN_BRACE);
432432
delete last_tok;
433433
std::list<identifier_token*> properties;
@@ -440,12 +440,13 @@ token* lexer::tokenize_statement(bool interactive_mode) {
440440
throw ERROR_UNEXPECTED_END;
441441
delete last_tok;
442442
read_token();
443+
lexer_state->declare_id(proto_id);
443444
return new structure_prototype(proto_id, properties);
444445
}
445446
case TOKEN_FUNC_KW: {
446447
delete last_tok;
447448
match_tok(read_token(), TOKEN_IDENTIFIER);
448-
identifier_token* proto_id = apply_groups((identifier_token*)last_tok);
449+
identifier_token* proto_id = (identifier_token*)last_tok;
449450
match_tok(read_token(), TOKEN_OPEN_PARAM);
450451
delete last_tok;
451452
std::list<identifier_token*> params;
@@ -462,6 +463,7 @@ token* lexer::tokenize_statement(bool interactive_mode) {
462463
throw ERROR_UNEXPECTED_TOKEN;
463464
delete last_tok;
464465
read_token();
466+
lexer_state->declare_id(proto_id);
465467
return new function_prototype(proto_id, params, tokenize_body());
466468
}
467469
case TOKEN_STATIC_KW: {
@@ -532,34 +534,6 @@ variable_access_token* lexer::tokenize_var_access(identifier_token* identifier)
532534
}
533535
return new variable_access_token(toks);
534536
}
535-
536-
identifier_token* lexer::apply_groups(identifier_token* id) {
537-
char* base_id = (char*)id->get_identifier();
538-
id->no_delete();
539-
delete id;
540-
541-
std::list<char> chars;
542-
543-
for (size_t i = 0; i < strlen(base_id); i++)
544-
chars.push_back(base_id[i]);
545-
546-
for (auto group_kw = lexer_state->group_stack.rbegin(); group_kw != lexer_state->group_stack.rend(); ++group_kw) {
547-
chars.push_back('@');
548-
for (size_t i = 0; i < strlen(*group_kw); i++)
549-
chars.push_back((*group_kw)[i]);
550-
}
551-
552-
char* id_buf = new char[chars.size() + 1];
553-
unsigned int i = 0;
554-
for (auto it = chars.begin(); it != chars.end(); ++it)
555-
id_buf[i++] = (*it);
556-
id_buf[i] = 0;
557-
558-
identifier_token* new_id = new identifier_token(id_buf, insecure_hash(id_buf));
559-
560-
return new_id;
561-
}
562-
563537
token* lexer::tokenize_value() {
564538
if (last_tok->type == TOKEN_IDENTIFIER) {
565539
identifier_token* identifier = (identifier_token*)last_tok;
@@ -582,6 +556,7 @@ token* lexer::tokenize_value() {
582556
match_tok(last_tok, TOKEN_CLOSE_PARAM);
583557
delete last_tok;
584558
read_token();
559+
this->lexer_state->reference_id(identifier);
585560
return new function_call_token(identifier, arguments);
586561
}
587562
else
@@ -647,6 +622,7 @@ token* lexer::tokenize_value() {
647622
delete last_tok;
648623
match_tok(read_token(), TOKEN_IDENTIFIER);
649624
create_struct_token* new_struct = new create_struct_token((identifier_token*)last_tok);
625+
this->lexer_state->reference_id(new_struct->identifier);
650626
read_token();
651627
return new_struct;
652628
}
@@ -680,4 +656,29 @@ token* lexer::tokenize_expression(unsigned char min) {
680656
lhs = new binary_operator_token(lhs, rhs, op);
681657
}
682658
return lhs;
659+
}
660+
661+
// Qualifies an identifier with the names of every enclosing group,
// appending "@<group-name>" for this group and each ancestor
// (innermost first), then rewrites the token in place via set_c_str,
// which also refreshes its cached hash.
void group::proc_id(identifier_token* id) {
	std::list<char> chars;

	const char* base = id->get_identifier();
	size_t base_len = strlen(base); // hoisted: strlen in the loop condition is O(n^2)
	for (size_t i = 0; i < base_len; i++)
		chars.push_back(base[i]);

	// Walk the group chain outward; each level contributes its OWN name.
	// (Must read current->identifier, not this->identifier — otherwise a
	// nested group yields "id@inner@inner" instead of "id@inner@outer".)
	for (group* current = this; current != nullptr; current = current->parent) {
		chars.push_back('@');
		const char* group_name = current->identifier->get_identifier();
		size_t name_len = strlen(group_name);
		for (size_t i = 0; i < name_len; i++)
			chars.push_back(group_name[i]);
	}

	char* new_buf = new char[chars.size() + 1];
	size_t in = 0;
	for (auto i = chars.begin(); i != chars.end(); ++i)
		new_buf[in++] = *i;
	new_buf[in] = 0; // null terminator

	// set_c_str takes ownership of new_buf and recomputes id_hash.
	id->set_c_str(new_buf);
}

src/lexer.h

Lines changed: 61 additions & 5 deletions
Original file line numberDiff line numberDiff line change
@@ -10,15 +10,72 @@
1010
#include "builtins.h"
1111
#include "tokens.h"
1212

13+
struct group {
14+
private:
15+
std::set<unsigned long> declerations;
16+
std::list<identifier_token*> to_process;
17+
identifier_token* identifier;
18+
19+
void proc_id(identifier_token* id);
20+
21+
public:
22+
group* parent;
23+
24+
group(struct identifier_token* identifier, group* parent = nullptr) {
25+
this->identifier = identifier;
26+
this->parent = parent;
27+
}
28+
29+
inline void proc_decleration(identifier_token* id) {
30+
declerations.insert(id->id_hash);
31+
proc_id(id);
32+
}
33+
34+
inline void proc_reference(identifier_token* id) {
35+
to_process.push_back(id);
36+
}
37+
38+
~group() {
39+
for (auto i = to_process.begin(); i != to_process.end(); ++i) {
40+
if (declerations.count((*i)->id_hash))
41+
proc_id(*i);
42+
}
43+
}
44+
};
45+
1346
struct lexer_state {
47+
private:
48+
group* top_group = nullptr;
49+
public:
1450
std::map<unsigned long, value*> constants;
15-
std::list<char*> group_stack;
16-
51+
1752
~lexer_state() {
1853
for (auto it = this->constants.begin(); it != this->constants.end(); ++it)
1954
delete (*it).second;
20-
for (auto it = this->group_stack.begin(); it != this->group_stack.end(); ++it)
21-
delete (*it);
55+
}
56+
57+
inline void declare_id(identifier_token* id) {
58+
if (top_group != nullptr)
59+
top_group->proc_decleration(id);
60+
}
61+
62+
inline void reference_id(identifier_token* id) {
63+
if (top_group != nullptr)
64+
top_group->proc_reference(id);
65+
}
66+
67+
inline group* current_group() {
68+
return this->top_group;
69+
}
70+
71+
inline void new_group(identifier_token* identifier) {
72+
top_group = new group(identifier, top_group);
73+
}
74+
75+
inline void pop_group() {
76+
group* to_delete = top_group;
77+
top_group = top_group->parent;
78+
delete to_delete;
2279
}
2380
};
2481

@@ -62,7 +119,6 @@ class lexer {
62119
token* read_token();
63120
token* tokenize_statement(bool interactive_mode);
64121
std::list<token*> tokenize_body();
65-
identifier_token* apply_groups(identifier_token* id);
66122
variable_access_token* tokenize_var_access();
67123
variable_access_token* tokenize_var_access(identifier_token* identifier);
68124
token* tokenize_expression(unsigned char min = 0);

src/runtime.cpp

Lines changed: 2 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -270,9 +270,9 @@ value_eval* interpreter::evaluate(token* eval_tok, bool force_reference) {
270270
}
271271
case TOKEN_CREATE_STRUCT: {
272272
create_struct_token* create_struct = (create_struct_token*)eval_tok;
273-
if (!struct_definitions.count(create_struct->prototype_identifier->id_hash))
273+
if (!struct_definitions.count(create_struct->identifier->id_hash))
274274
throw ERROR_STRUCT_PROTO_NOT_DEFINED;
275-
structure* created_struct = new structure(struct_definitions[create_struct->prototype_identifier->id_hash], &garbage_collector);
275+
structure* created_struct = new structure(struct_definitions[create_struct->identifier->id_hash], &garbage_collector);
276276
return new value_eval(created_struct->get_parent_ref());
277277
}
278278
case TOKEN_CREATE_ARRAY: {

src/tokens.cpp

Lines changed: 3 additions & 3 deletions
Original file line numberDiff line numberDiff line change
@@ -235,12 +235,12 @@ create_array_token::~create_array_token() {
235235
destroy_value_tok(*i);
236236
}
237237

238-
create_struct_token::create_struct_token(identifier_token* prototype_identifier) : token(TOKEN_CREATE_STRUCT){
239-
this->prototype_identifier = prototype_identifier;
238+
create_struct_token::create_struct_token(identifier_token* identifier) : token(TOKEN_CREATE_STRUCT){
239+
this->identifier = identifier;
240240
}
241241

242242
create_struct_token::~create_struct_token() {
243-
delete this->prototype_identifier;
243+
delete this->identifier;
244244
}
245245

246246
function_prototype::function_prototype(identifier_token* identifier, std::list<identifier_token*> argument_identifiers, std::list<token*> tokens) : token(TOKEN_FUNC_PROTO) {

src/tokens.h

Lines changed: 12 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -6,6 +6,7 @@
66
#include <list>
77
#include "errors.h"
88
#include "value.h"
9+
#include "hash.h"
910

1011

1112
//value and accessor tokens 0-4
@@ -69,6 +70,15 @@ struct identifier_token : token {
6970
inline const char* get_identifier() {
7071
return (const char*)this->id_str_ptr;
7172
}
73+
74+
inline void set_c_str(char* id_str) {
75+
if(delete_id)
76+
delete[] this->id_str_ptr;
77+
delete_id = true;
78+
this->id_str_ptr = id_str;
79+
this->id_hash = insecure_hash(id_str);
80+
}
81+
7282
unsigned long id_hash;
7383
identifier_token(const char* identifier);
7484
identifier_token(char* identifier, unsigned long id_hash, bool delete_id = true);
@@ -142,8 +152,8 @@ struct create_array_token :token {
142152
};
143153

144154
struct create_struct_token :token {
145-
identifier_token* prototype_identifier;
146-
create_struct_token(identifier_token* prototype_identifier);
155+
identifier_token* identifier;
156+
create_struct_token(identifier_token* identifier);
147157
~create_struct_token();
148158
};
149159

0 commit comments

Comments
 (0)