marrub / LoveToken (archived)

Added my email to license, renamed parser.lua to tokenizer.lua, changed all instances of "parser" to "tokenizer", improved tokenizer:getToken() function

pull/1/head
Benjamin Moir 2015-06-06 14:17:23 +09:30
parent 8f637feac8
commit 1d64ae4e33
4 changed files with 137 additions and 130 deletions

File: license

@@ -1,4 +1,4 @@
-Copyright (c) 2015 Benjamin Moir
+Copyright (c) 2015 Benjamin Moir <bennyboy.private@hotmail.com.au>
 Copyright (c) 2015 Marrub <marrub@greyserv.net>
 Permission is hereby granted, free of charge, to any person obtaining a copy

File: tokenizer.lua (renamed from parser.lua)

@@ -1,6 +1,6 @@
 --[[
-Copyright (c) 2015 Benjamin Moir
+Copyright (c) 2015 Benjamin Moir <bennyboy.private@hotmail.com.au>
 Copyright (c) 2015 Marrub <marrub@greyserv.net>
 Permission is hereby granted, free of charge, to any person obtaining a copy
@@ -24,7 +24,7 @@ THE SOFTWARE.
 --]]
 local ffi = require("ffi")
-local parser = {}
+local tokenizer = {}
 local loveToken = ffi.load("LoveToken")
 ffi.cdef([[
@@ -66,16 +66,16 @@ LT_Token LT_GetToken();
 local pReturn
-function parser:init(initInfo, filePath)
+function tokenizer:init(initInfo, filePath)
 	loveToken.LT_Init(initInfo)
 	loveToken.LT_OpenFile(filePath)
 end
-function parser:assert(assertion, str)
+function tokenizer:assert(assertion, str)
 	return loveToken.LT_Assert(assertion, str)
 end
-function parser:checkError()
+function tokenizer:checkError()
 	ltAssertion = loveToken.LT_CheckAssert()
 	if (ltAssertion.str == nil) then
@@ -85,43 +85,50 @@ function parser:checkError()
 	end
 end
-function parser:openFile(filePath)
+function tokenizer:openFile(filePath)
 	pReturn = loveToken.LT_OpenFile(filePath)
-	parser:checkError()
+	tokenizer:checkError()
 	return pReturn
 end
-function parser:closeFile()
+function tokenizer:closeFile()
 	loveToken.LT_CloseFile()
 end
-function parser:quit()
+function tokenizer:quit()
 	loveToken.LT_CloseFile()
 	loveToken.LT_Quit()
 end
-function parser:readNumber()
+function tokenizer:readNumber()
 	pReturn = loveToken.LT_ReadNumber()
-	parser:checkError()
+	tokenizer:checkError()
 	return ffi.string(pReturn)
 end
-function parser:readString(term)
+function tokenizer:readString(term)
 	pReturn = loveToken.LT_ReadString(term)
-	parser:checkError()
+	tokenizer:checkError()
 	return ffi.string(pReturn)
 end
-function parser:escaper(str, pos, escape)
+function tokenizer:escaper(str, pos, escape)
 	pReturn = loveToken.LT_Escaper(str, pos, escape)
-	parser:checkError()
+	tokenizer:checkError()
 	return ffi.string(pReturn)
 end
-function parser:getToken()
+function tokenizer:getToken()
 	pReturn = loveToken.LT_GetToken()
-	parser:checkError()
-	return pReturn
+	tokenizer:checkError()
+	local lt = {}
+	lt.token = ffi.string(pReturn.token)
+	lt.string = pReturn.string
+	lt.pos = pReturn.pos
+	if (pReturn.string ~= nil) then
+		lt.string = ffi.string(pReturn.string)
+	end
+	return lt
end
-return parser
+return tokenizer
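
The main functional change here is that tokenizer:getToken() now converts the raw LT_Token cdata into a plain Lua table with token, string, and pos fields. A minimal caller sketch follows; it is not part of this commit, and the init-info value, the file name, and the end-of-stream check are placeholders, since the diff does not show how LT_Init() is configured or how the library signals the end of input.

-- Hypothetical usage of the renamed module (assumptions noted above).
local tokenizer = require("tokenizer")

local initInfo = nil                     -- placeholder: LT_Init()'s argument is not described in this diff
tokenizer:init(initInfo, "example.txt")  -- illustrative file path

while true do
	local lt = tokenizer:getToken()      -- now a Lua table, not LT_Token cdata
	if lt.token == nil or lt.token == "" then
		break                            -- assumed end-of-stream condition
	end
	print(lt.pos, lt.token, lt.string)
end

tokenizer:quit()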

File: C source/header (license comment)

@@ -1,5 +1,5 @@
 /*
-Copyright (c) 2015 Benjamin Moir
+Copyright (c) 2015 Benjamin Moir <bennyboy.private@hotmail.com.au>
 Copyright (c) 2015 Marrub <marrub@greyserv.net>
 Permission is hereby granted, free of charge, to any person obtaining a copy

File: C source/header (license comment)

@@ -1,5 +1,5 @@
 /*
-Copyright (c) 2015 Benjamin Moir
+Copyright (c) 2015 Benjamin Moir <bennyboy.private@hotmail.com.au>
 Copyright (c) 2015 Marrub <marrub@greyserv.net>
 Permission is hereby granted, free of charge, to any person obtaining a copy