Lines Matching refs:tokens

352 struct token *tokens; in tokenise() local
359 token_list = tokens = calloc((end - buffer) / 2, sizeof(struct token)); in tokenise()
360 if (!tokens) { in tokenise()
411 tokens[tix].line = lineno; in tokenise()
424 tokens[tix].size = q - p; in tokenise()
427 tokens[tix].content = malloc(tokens[tix].size + 1); in tokenise()
428 if (!tokens[tix].content) { in tokenise()
432 memcpy(tokens[tix].content, start, tokens[tix].size); in tokenise()
433 tokens[tix].content[tokens[tix].size] = 0; in tokenise()
438 if (islower(tokens[tix].content[0])) { in tokenise()
439 tokens[tix++].token_type = TOKEN_ELEMENT_NAME; in tokenise()
446 dir = bsearch(&tokens[tix], directives, in tokenise()
451 tokens[tix++].token_type = dir - directives; in tokenise()
455 tokens[tix++].token_type = TOKEN_TYPE_NAME; in tokenise()
465 tokens[tix].size = q - p; in tokenise()
467 tokens[tix].content = malloc(tokens[tix].size + 1); in tokenise()
468 if (!tokens[tix].content) { in tokenise()
472 memcpy(tokens[tix].content, start, tokens[tix].size); in tokenise()
473 tokens[tix].content[tokens[tix].size] = 0; in tokenise()
474 tokens[tix++].token_type = TOKEN_NUMBER; in tokenise()
481 tokens[tix].size = 3; in tokenise()
482 tokens[tix].content = "::="; in tokenise()
483 tokens[tix++].token_type = TOKEN_ASSIGNMENT; in tokenise()
491 tokens[tix].size = 2; in tokenise()
492 tokens[tix].content = "({"; in tokenise()
493 tokens[tix++].token_type = TOKEN_OPEN_ACTION; in tokenise()
498 tokens[tix].size = 2; in tokenise()
499 tokens[tix].content = "})"; in tokenise()
500 tokens[tix++].token_type = TOKEN_CLOSE_ACTION; in tokenise()
506 tokens[tix].size = 1; in tokenise()
510 tokens[tix].content = "{"; in tokenise()
511 tokens[tix++].token_type = TOKEN_OPEN_CURLY; in tokenise()
515 tokens[tix].content = "}"; in tokenise()
516 tokens[tix++].token_type = TOKEN_CLOSE_CURLY; in tokenise()
520 tokens[tix].content = "["; in tokenise()
521 tokens[tix++].token_type = TOKEN_OPEN_SQUARE; in tokenise()
525 tokens[tix].content = "]"; in tokenise()
526 tokens[tix++].token_type = TOKEN_CLOSE_SQUARE; in tokenise()
530 tokens[tix].content = ","; in tokenise()
531 tokens[tix++].token_type = TOKEN_COMMA; in tokenise()
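Read together, the matches outline how tokenise() builds its output: line 359 allocates a worst-case array of (end - buffer) / 2 slots up front (presumably assuming a token costs at least two bytes of input on average), tix indexes the next free slot, each branch fills in .line, .size and .content, and the post-increment on the .token_type assignment is what advances tix. Names and numbers get a malloc'd, NUL-terminated copy of the input (lines 427-433, 465-473), while fixed punctuation simply points .content at a string literal (lines 481-531). The standalone sketch below reproduces that pattern; the struct layout, the enum ordering and the emit_copied() helper are illustrative assumptions, not the file's actual definitions.

#include <stdio.h>
#include <stdlib.h>
#include <string.h>

/* Token kinds named in the listing; ordering here is arbitrary. */
enum token_type {
	TOKEN_ASSIGNMENT,	/* "::=" */
	TOKEN_OPEN_ACTION,	/* "({"  */
	TOKEN_CLOSE_ACTION,	/* "})"  */
	TOKEN_OPEN_CURLY,	/* "{"   */
	TOKEN_CLOSE_CURLY,	/* "}"   */
	TOKEN_OPEN_SQUARE,	/* "["   */
	TOKEN_CLOSE_SQUARE,	/* "]"   */
	TOKEN_COMMA,		/* ","   */
	TOKEN_NUMBER,
	TOKEN_TYPE_NAME,	/* name starting upper-case */
	TOKEN_ELEMENT_NAME,	/* name starting lower-case */
};

/* Only the fields the matched lines show being written. */
struct token {
	unsigned int	line;		/* line the token starts on        */
	unsigned int	size;		/* content length in bytes         */
	enum token_type	token_type;
	char		*content;	/* NUL-terminated copy, or literal */
};

/* Copy a variable-length token (name or number) into the next slot,
 * mirroring lines 427-433/465-473: malloc, memcpy, NUL-terminate,
 * then post-increment the slot index when the type is recorded. */
static void emit_copied(struct token *tokens, unsigned int *tix,
			unsigned int lineno, const char *start,
			unsigned int len, enum token_type type)
{
	struct token *t = &tokens[*tix];

	t->line = lineno;
	t->size = len;
	t->content = malloc(len + 1);
	if (!t->content) {
		perror("malloc");
		exit(1);
	}
	memcpy(t->content, start, len);
	t->content[len] = 0;
	t->token_type = type;
	(*tix)++;
}

int main(void)
{
	const char buffer[] = "Foo ::= SEQUENCE";
	const char *end = buffer + sizeof(buffer) - 1;
	unsigned int tix = 0;
	struct token *tokens;

	/* Worst-case allocation as on line 359: one slot per two bytes. */
	tokens = calloc((end - buffer) / 2, sizeof(struct token));
	if (!tokens) {
		perror("calloc");
		exit(1);
	}

	/* "Foo" is copied out of the buffer like a real name token.     */
	emit_copied(tokens, &tix, 1, buffer, 3, TOKEN_TYPE_NAME);

	/* Fixed punctuation points at a literal, as on lines 481-483.   */
	tokens[tix].line = 1;
	tokens[tix].size = 3;
	tokens[tix].content = "::=";
	tokens[tix++].token_type = TOKEN_ASSIGNMENT;

	printf("%u tokens: \"%s\" \"%s\"\n",
	       tix, tokens[0].content, tokens[1].content);
	return 0;
}

Post-incrementing tix only when .token_type is written keeps each slot fully populated before the index moves on. Note also that because the fixed tokens in the sketch share string literals, a tear-down pass could not blindly free() every .content.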