Refactor tokenization to save raw tokens and use static buffer space...

...rather than dynamic storage.

As a side effect, this seems to have fixed a very minor bug in the processing
of the bare word 'nothing'.  But I don't know where the bug was.  Not happy.
This commit is contained in:
Eric S. Raymond 2017-07-03 08:53:10 -04:00
parent eba8015059
commit f03bde268e
4 changed files with 14 additions and 23 deletions

View file

@@ -183,6 +183,7 @@ struct command_t {
vocab_t obj;
token_t wd1, wd1x;
token_t wd2, wd2x;
char raw1[BUFSIZ], raw2[BUFSIZ];
};
extern struct game_t game;
@@ -192,7 +193,7 @@ extern char* xstrdup(const char* s);
extern void* xmalloc(size_t size);
extern void packed_to_token(long, char token[]);
extern long token_to_packed(const char token[TOKLEN+1]);
extern void tokenize(char*, long tokens[4]);
extern void tokenize(char*, struct command_t *);
extern void vspeak(const char*, bool, va_list);
extern bool wordeq(token_t, token_t);
extern bool wordempty(token_t);

7
main.c
View file

@@ -1054,12 +1054,7 @@ L2600:
strncpy(inputbuf, input, LINESIZE - 1);
free(input);
long tokens[4];
tokenize(inputbuf, tokens);
command.wd1 = tokens[0];
command.wd1x = tokens[1];
command.wd2 = tokens[2];
command.wd2x = tokens[3];
tokenize(inputbuf, &command);
/* Every input, check "game.foobar" flag. If zero, nothing's
* going on. If pos, make neg. If neg, he skipped a word,

25
misc.c
View file

@@ -100,17 +100,12 @@ long token_to_packed(const char token[TOKLEN+1])
return (packed);
}
void tokenize(char* raw, long tokens[4])
void tokenize(char* raw, struct command_t *cmd)
{
// set each token to 0
for (int i = 0; i < 4; ++i)
tokens[i] = 0;
memset(cmd, '\0', sizeof(struct command_t));
// grab the first two words
char* words[2];
words[0] = (char*) xmalloc(strlen(raw) + 1);
words[1] = (char*) xmalloc(strlen(raw) + 1);
int word_count = sscanf(raw, "%s%s", words[0], words[1]);
/* FIXME: put a bound prefix on the %s to prevent buffer overflow */
int word_count = sscanf(raw, "%s%s", cmd->raw1, cmd->raw2);
// make space for substrings and zero it out
char chunk_data[][TOKLEN+1] = {
@@ -121,11 +116,9 @@ void tokenize(char* raw, long tokens[4])
};
// break the words into up to 4 5-char substrings
sscanf(words[0], "%5s%5s", chunk_data[0], chunk_data[1]);
sscanf(cmd->raw1, "%5s%5s", chunk_data[0], chunk_data[1]);
if (word_count == 2)
sscanf(words[1], "%5s%5s", chunk_data[2], chunk_data[3]);
free(words[0]);
free(words[1]);
sscanf(cmd->raw2, "%5s%5s", chunk_data[2], chunk_data[3]);
// uppercase all the substrings
for (int i = 0; i < 4; ++i)
@@ -133,8 +126,10 @@ void tokenize(char* raw, long tokens[4])
chunk_data[i][j] = (char) toupper(chunk_data[i][j]);
// pack the substrings
for (int i = 0; i < 4; ++i)
tokens[i] = token_to_packed(chunk_data[i]);
cmd->wd1 = token_to_packed(chunk_data[0]);
cmd->wd1x = token_to_packed(chunk_data[1]);
cmd->wd2 = token_to_packed(chunk_data[2]);
cmd->wd2x = token_to_packed(chunk_data[3]);
}
/* Hide the fact that words are currently packed longs */

View file

@@ -122,7 +122,7 @@ Say what?
> nothing
Okay, "NOTHI".
OK
> wave