
add string field

master
nisstyre56 committed 10 years ago
parent commit 1fd19a37d1
  1. tokenize.c (12 lines changed)
  2. tokenize.py (3 lines changed)

tokenize.c (12 lines changed)

@@ -102,7 +102,7 @@ push_token(token_stream *tokens,
     /* We've reached the maximum stack size
      * So we must try to increase that by GROWTH_SIZE
      */
-    token_t *new_tokens = xrealloc(tokens->tokens, sizeof(token_t) * (max + GROWTH_SIZE));
+    token_t *new_tokens = xrealloc(tokens->tokens, sizeof (token_t) * (max + GROWTH_SIZE));
     if (!new_tokens) {
       printf("Could not allocate enough memory for the token stack\n");
       exit(EXIT_FAILURE);
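
The comment in this hunk documents the grow-on-demand pattern used for the token stack. Below is a minimal sketch of that pattern, assuming plain realloc in place of the project's xrealloc wrapper, a stand-in token_t, and a made-up GROWTH_SIZE (the real constant is defined in tokenize.c):

/* Sketch only: stand-in token_t and an assumed GROWTH_SIZE. */
#include <stdio.h>
#include <stdlib.h>

#define GROWTH_SIZE 100          /* assumed; the real value lives in tokenize.c */

typedef struct { int type; } token_t;   /* stand-in for the real token_t */

int main(void) {
  size_t max = 4;
  token_t *tokens = calloc(max, sizeof (token_t));
  if (!tokens)
    return EXIT_FAILURE;

  /* The stack is full: grow it by GROWTH_SIZE slots. */
  token_t *new_tokens = realloc(tokens, sizeof (token_t) * (max + GROWTH_SIZE));
  if (!new_tokens) {
    free(tokens);                /* on failure the old block is still valid */
    printf("Could not allocate enough memory for the token stack\n");
    return EXIT_FAILURE;
  }
  tokens = new_tokens;
  max += GROWTH_SIZE;

  printf("stack grown to %zu slots\n", max);
  free(tokens);
  return EXIT_SUCCESS;
}

Assigning the result to a separate new_tokens pointer before overwriting the stack pointer matters: realloc returns NULL on failure but leaves the old block valid, so the original pointer is still needed to free it or keep using it.
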
@@ -311,7 +311,7 @@ tokenize(source_t source,
   const char *current_token_val;
   token_stream token_stack;
   token_val_t current_token;
-  token_t *tokens = xcalloc(STACK_SIZE, sizeof(token_t));
+  token_t *tokens = xcalloc(STACK_SIZE, sizeof (token_t));
   hsh_HashTable token_memo = hsh_create(NULL, NULL);
@@ -353,7 +353,7 @@ tokenize(source_t source,
       else {
         source[position] = lookahead;
         assert(position > begin);
-        current_token_val = xcalloc(((position - begin) + 1), sizeof(char));
+        current_token_val = xcalloc(((position - begin) + 1), sizeof (char));
         CHECK(current_token_val);
         extract_token(position, begin, source, current_token_val);
         hsh_insert(token_stack.memo, current_token_val, current_token_val);
@@ -374,7 +374,7 @@ tokenize(source_t source,
         assert(position <= length);
         source[position] = lookahead;
-        current_token_val = xcalloc(((position - begin) + 1), sizeof(char));
+        current_token_val = xcalloc(((position - begin) + 1), sizeof (char));
         CHECK(current_token_val);
         extract_token(position, begin, source, current_token_val);
         hsh_insert(token_stack.memo, current_token_val, current_token_val);
@@ -395,7 +395,7 @@ tokenize(source_t source,
         assert(position <= length);
         source[position] = lookahead;
-        current_token_val = xcalloc(((position - begin) + 1), sizeof(char));
+        current_token_val = xcalloc(((position - begin) + 1), sizeof (char));
         CHECK(current_token_val);
         extract_token(position, begin, source, current_token_val);
         hsh_insert(token_stack.memo, current_token_val, current_token_val);
@@ -421,7 +421,7 @@ tokenize(source_t source,
         assert(position <= length);
         source[position] = lookahead;
-        current_token_val = xcalloc(((position - begin) + 1), sizeof(char));
+        current_token_val = xcalloc(((position - begin) + 1), sizeof (char));
         CHECK(current_token_val);
         extract_token(position, begin, source, current_token_val);
         hsh_insert(token_stack.memo, current_token_val, current_token_val);
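
The four hunks above all touch the same step of the tokenizer loop: allocate (position - begin) + 1 bytes, copy the token text out of source, and memoize the string in the stack's hash table. A hedged stand-in for the copy step, assuming extract_token is a plain substring copy (its real definition lives elsewhere in tokenize.c):

/* Sketch only: extract_token_sketch assumes the real extract_token
 * copies source[begin, position) into a NUL-terminated buffer. */
#include <assert.h>
#include <stdio.h>
#include <stdlib.h>
#include <string.h>

static void
extract_token_sketch(size_t position, size_t begin,
                     const char *source, char *out)
{
  memcpy(out, source + begin, position - begin);
  out[position - begin] = '\0';
}

int main(void) {
  const char *source = "(foo bar)";
  size_t begin = 1, position = 4;           /* the span "foo" */

  assert(position > begin);
  char *current_token_val = calloc((position - begin) + 1, sizeof (char));
  if (!current_token_val)
    return EXIT_FAILURE;

  extract_token_sketch(position, begin, source, current_token_val);
  printf("token: %s\n", current_token_val); /* prints: token: foo */

  free(current_token_val);
  return EXIT_SUCCESS;
}

The hsh_insert calls record each allocated string in token_stack.memo; the commit leaves that logic untouched and only changes the spelling of the sizeof calls.
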

tokenize.py (3 lines changed)

@@ -18,6 +18,7 @@ class TokenValT(Union):
     ("integer", c_char_p),
     ("floating", c_char_p),
     ("parenthesis", c_char_p),
+    ("string", c_char_p),
     ("quote", c_bool),
     ("whitespace", c_bool),
     ("null_token", c_bool)]
@@ -49,4 +50,4 @@ def tokenize(source):
 line = " '''' a b"
 xs = list(tokenize(line))
-print xs
+print(xs)