
apparently these random changes managed to cut the time to tokenize an 11MB file by 10 seconds on my PC

Marrub 2015-06-06 22:30:15 -04:00
parent 5a199ae8f6
commit 0c758e3f63
2 changed files with 33 additions and 32 deletions


@@ -5,7 +5,7 @@ PLFLAGS=
 LIBNAME=
 OUTDIR=bin
 LFLAGS=-shared -g -ggdb
-CFLAGS=--std=c99 -g -ggdb -Wall
+CFLAGS=--std=c99 -g -ggdb -O2 -Wall
 ifeq ($(OS),Windows_NT)
 CC+=mingw32-gcc
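The Makefile change is a single flag: -O2 turns on GCC's standard optimization passes, where the build previously compiled with no optimization at all. It is quite possible this flag, rather than the source tweaks below, accounts for most of the measured speedup.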


@@ -688,35 +688,42 @@ LT_Token LT_GetToken()
 		return tk;
 	}
 	
-	for(size_t i = 0; i < 6;)
+	if(stringChars[0] != '\0')
 	{
-		char cc = stringChars[i++];
-		
-		if(cc == '\0')
-		{
-			break;
-		}
-		else if(c == cc)
-		{
-			tk.string = LT_ReadString(c);
-			tk.token = LT_TkNames[TOK_String];
-			return tk;
+		for(size_t i = 0; i < 6;)
+		{
+			char cc = stringChars[i++];
+			
+			if(cc == '\0')
+			{
+				break;
+			}
+			else if(c == cc)
+			{
+				tk.token = LT_TkNames[TOK_String];
+				tk.string = LT_ReadString(c);
+				return tk;
+			}
 		}
 	}
 	
-	for(size_t i = 0; i < 6;)
+	if(charChars[0] != '\0')
 	{
-		char cc = charChars[i++];
-		
-		if(cc == '\0')
-		{
-			break;
-		}
-		else if(c == cc)
-		{
-			tk.string = LT_ReadString(c);
-			tk.token = LT_TkNames[TOK_Charac];
-			return tk;
+		for(size_t i = 0; i < 6;)
+		{
+			char cc = charChars[i++];
+			
+			if(cc == '\0')
+			{
+				break;
+			}
+			else if(c == cc)
+			{
+				tk.token = LT_TkNames[TOK_Charac];
+				tk.string = LT_ReadString(c);
+				return tk;
+			}
 		}
 	}
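The substantive change in both loops is the new guard on the first byte of the delimiter table: when stringChars or charChars is empty, the scan is now skipped entirely instead of being entered and immediately broken out of for every input character. A minimal sketch of that pattern, using a hypothetical isStringDelim() helper and a stand-in table rather than the library's real fields:

#include <stddef.h>
#include <stdio.h>

/* hypothetical stand-in for the library's stringChars table: up to 6
   delimiter characters, terminated early by '\0' when not full */
static char stringChars[6] = "\"'";

static int isStringDelim(int c)
{
	/* the hoisted guard: an empty table starts with '\0', so the
	   whole scan can be skipped instead of looping just to break */
	if(stringChars[0] != '\0')
	{
		for(size_t i = 0; i < 6;)
		{
			char cc = stringChars[i++];
			
			if(cc == '\0')
			{
				break;
			}
			else if(c == cc)
			{
				return 1;
			}
		}
	}
	
	return 0;
}

int main(void)
{
	/* prints "1 0": '"' is a string delimiter, 'x' is not */
	printf("%d %d\n", isStringDelim('"'), isStringDelim('x'));
	return 0;
}

Since LT_GetToken() runs once per token, an always-taken loop entry on a multi-megabyte file adds up; hoisting the emptiness check down to a single byte compare is the cheapest way to avoid it.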
@@ -724,8 +731,8 @@ LT_Token LT_GetToken()
 	{
 		ungetc(c, parseFile);
-		tk.string = LT_ReadNumber();
 		tk.token = LT_TkNames[TOK_Number];
+		tk.string = LT_ReadNumber();
 		return tk;
 	}
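This hunk, like the final one below, only swaps the order in which tk.token and tk.string are assigned; LT_ReadNumber() is called either way, so any effect from the reordering alone is likely small next to -O2 and the hoisted guards above.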
@@ -743,11 +750,6 @@ LT_Token LT_GetToken()
 		str[i++] = c;
 		
-		if(info.stripInvalid)
-		{
-			str[i++] = (isspace(c) || isprint(c)) ? c : ' ';
-		}
-		
 		c = fgetc(parseFile);
 	}
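As reconstructed here, the removed block appended a second, whitespace-normalized copy of the character whenever info.stripInvalid was set, so dropping it both saves an isspace()/isprint() call per identifier character and removes what looks like a double-append.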
@@ -760,19 +762,18 @@ LT_Token LT_GetToken()
 		ungetc(c, parseFile);
 		
-		tk.string = LT_SetGarbage(LT_ReAlloc(str, i));
 		tk.token = LT_TkNames[TOK_Identi];
+		tk.string = LT_SetGarbage(LT_ReAlloc(str, i));
 		return tk;
 	}
 	
+	tk.token = LT_TkNames[TOK_ChrSeq];
 	tk.string = LT_Alloc(2);
 	tk.string[0] = c;
 	tk.string[1] = '\0';
 	LT_SetGarbage(tk.string);
-	tk.token = LT_TkNames[TOK_ChrSeq];
 	return tk;
 }
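The 10-second figure in the commit message is easy to sanity-check with a wall-clock harness. A generic sketch (not part of the library; tokenizeFile() is a hypothetical stand-in for a loop that calls LT_GetToken() until the file is exhausted):

#include <stdio.h>
#include <time.h>

/* stand-in tokenizer: just consumes the file byte by byte; replace the
   body with real LT_GetToken() calls when linking against the library */
static void tokenizeFile(const char *path)
{
	FILE *f = fopen(path, "rb");
	
	if(f == NULL)
	{
		return;
	}
	
	while(fgetc(f) != EOF)
	{
		;
	}
	
	fclose(f);
}

int main(int argc, char **argv)
{
	clock_t start = clock();
	
	tokenizeFile(argc > 1 ? argv[1] : "test.txt");
	
	double secs = (double)(clock() - start) / CLOCKS_PER_SEC;
	printf("tokenized in %.2f seconds\n", secs);
	
	return 0;
}

Running the same build of the harness against the tokenizer before and after the commit, on the same large input, is enough to separate the -O2 effect from the source-level changes: build once without the flag and once with it.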