Skip to content

Commit

Permalink
Add support for Lua 5.2
Browse files Browse the repository at this point in the history
Includes aliases for functions deprecated or removed in Lua 5.2, such as
math.log10, unpack, and loadstring.
  • Loading branch information
colesbury committed May 29, 2015
1 parent 8a7dc48 commit 18fb209
Show file tree
Hide file tree
Showing 20 changed files with 80 additions and 31 deletions.
2 changes: 1 addition & 1 deletion DiskFile.c
Original file line number Diff line number Diff line change
Expand Up @@ -80,6 +80,6 @@ void torch_DiskFile_init(lua_State *L)
luaT_newmetatable(L, "torch.DiskFile", "torch.File",
torch_DiskFile_new, torch_DiskFile_free, NULL);

luaL_register(L, NULL, torch_DiskFile__);
luaT_setfuncs(L, torch_DiskFile__, 0);
lua_pop(L, 1);
}
2 changes: 1 addition & 1 deletion File.c
Original file line number Diff line number Diff line change
Expand Up @@ -199,6 +199,6 @@ static const struct luaL_Reg torch_File__ [] = {
void torch_File_init(lua_State *L)
{
luaT_newmetatable(L, "torch.File", NULL, NULL, NULL, NULL);
luaL_register(L, NULL, torch_File__);
luaT_setfuncs(L, torch_File__, 0);
lua_pop(L, 1);
}
3 changes: 3 additions & 0 deletions File.lua
Original file line number Diff line number Diff line change
Expand Up @@ -21,6 +21,9 @@ local TYPE_BOOLEAN = 5
local TYPE_FUNCTION = 6
local TYPE_RECUR_FUNCTION = 7

-- Lua 5.2 compatibility: loadstring was deprecated in 5.2 in favour of
-- load, which accepts a string chunk directly.  Keep a local alias so the
-- serialization code below works unchanged on either version.
local loadstring = loadstring or load

function File:isWritableObject(object)
local typename = type(object)
local typeidx
Expand Down
2 changes: 1 addition & 1 deletion Generator.c
Original file line number Diff line number Diff line change
Expand Up @@ -24,6 +24,6 @@ void torch_Generator_init(lua_State *L)
{
luaT_newmetatable(L, torch_Generator, NULL,
torch_Generator_new, torch_Generator_free, torch_Generator_factory);
luaL_register(L, NULL, torch_Generator_table_);
luaT_setfuncs(L, torch_Generator_table_, 0);
lua_pop(L, 1);
}
2 changes: 1 addition & 1 deletion MemoryFile.c
Original file line number Diff line number Diff line change
Expand Up @@ -56,6 +56,6 @@ void torch_MemoryFile_init(lua_State *L)
{
luaT_newmetatable(L, "torch.MemoryFile", "torch.File",
torch_MemoryFile_new, torch_MemoryFile_free, NULL);
luaL_register(L, NULL, torch_MemoryFile__);
luaT_setfuncs(L, torch_MemoryFile__, 0);
lua_pop(L, 1);
}
2 changes: 1 addition & 1 deletion PipeFile.c
Original file line number Diff line number Diff line change
Expand Up @@ -38,6 +38,6 @@ void torch_PipeFile_init(lua_State *L)
{
luaT_newmetatable(L, "torch.PipeFile", "torch.DiskFile",
torch_PipeFile_new, torch_PipeFile_free, NULL);
luaL_register(L, NULL, torch_PipeFile__);
luaT_setfuncs(L, torch_PipeFile__, 0);
lua_pop(L, 1);
}
7 changes: 5 additions & 2 deletions Tensor.lua
Original file line number Diff line number Diff line change
Expand Up @@ -7,6 +7,9 @@ local Tensor = {}
-- types
local types = {'Byte', 'Char', 'Short', 'Int', 'Long', 'Float', 'Double'}

-- Lua 5.2 compatibility: math.log10 was deprecated in 5.2.  Fall back to
-- the two-argument math.log(x, base) form; that form is itself new in 5.2,
-- so the fallback is only ever taken on versions where it exists.
local log10 = math.log10 or function(x) return math.log(x, 10) end

-- tostring() functions for Tensor and Storage
local function Storage__printformat(self)
if self:size() == 0 then
Expand All @@ -25,13 +28,13 @@ local function Storage__printformat(self)
local tensor = torch.DoubleTensor(torch.DoubleStorage(self:size()):copy(self), 1, self:size()):abs()
local expMin = tensor:min()
if expMin ~= 0 then
expMin = math.floor(math.log10(expMin)) + 1
expMin = math.floor(log10(expMin)) + 1
else
expMin = 1
end
local expMax = tensor:max()
if expMax ~= 0 then
expMax = math.floor(math.log10(expMax)) + 1
expMax = math.floor(log10(expMax)) + 1
else
expMax = 1
end
Expand Down
9 changes: 5 additions & 4 deletions TensorMath.lua
Original file line number Diff line number Diff line change
Expand Up @@ -120,7 +120,8 @@ local function wrap(...)
end
end
end
method:wrap(unpack(args))
local unpack = unpack or table.unpack
method:wrap(unpack(args))
end

local reals = {ByteTensor='unsigned char',
Expand Down Expand Up @@ -1133,12 +1134,12 @@ static void torch_TensorMath_init(lua_State *L)
luaT_pushmetatable(L, "torch.Tensor");
/* register methods */
luaL_register(L, NULL, m_torch_TensorMath__);
luaT_setfuncs(L, m_torch_TensorMath__, 0);
/* register functions into the "torch" field of the tensor metaclass */
lua_pushstring(L, "torch");
lua_newtable(L);
luaL_register(L, NULL, torch_TensorMath__);
luaT_setfuncs(L, torch_TensorMath__, 0);
lua_rawset(L, -3);
lua_pop(L, 1);
}
Expand All @@ -1157,7 +1158,7 @@ void torch_TensorMath_init(lua_State *L)
torch_LongTensorMath_init(L);
torch_FloatTensorMath_init(L);
torch_DoubleTensorMath_init(L);
luaL_register(L, NULL, torch_TensorMath__);
luaT_setfuncs(L, torch_TensorMath__, 0);
}
]])

Expand Down
2 changes: 1 addition & 1 deletion Timer.c
Original file line number Diff line number Diff line change
Expand Up @@ -165,6 +165,6 @@ static const struct luaL_Reg torch_Timer__ [] = {
void torch_Timer_init(lua_State *L)
{
luaT_newmetatable(L, "torch.Timer", NULL, torch_Timer_new, torch_Timer_free, NULL);
luaL_register(L, NULL, torch_Timer__);
luaT_setfuncs(L, torch_Timer__, 0);
lua_pop(L, 1);
}
2 changes: 1 addition & 1 deletion generic/Storage.c
Original file line number Diff line number Diff line change
Expand Up @@ -273,7 +273,7 @@ void torch_Storage_(init)(lua_State *L)
{
luaT_newmetatable(L, torch_Storage, NULL,
torch_Storage_(new), torch_Storage_(free), torch_Storage_(factory));
luaL_register(L, NULL, torch_Storage_(_));
luaT_setfuncs(L, torch_Storage_(_), 0);
lua_pop(L, 1);
}

Expand Down
2 changes: 1 addition & 1 deletion generic/Tensor.c
Original file line number Diff line number Diff line change
Expand Up @@ -1278,7 +1278,7 @@ void torch_Tensor_(init)(lua_State *L)
{
luaT_newmetatable(L, torch_Tensor, NULL,
torch_Tensor_(new), torch_Tensor_(free), torch_Tensor_(factory));
luaL_register(L, NULL, torch_Tensor_(_));
luaT_setfuncs(L, torch_Tensor_(_), 0);
lua_pop(L, 1);
}

Expand Down
2 changes: 1 addition & 1 deletion generic/TensorOperator.c
Original file line number Diff line number Diff line change
Expand Up @@ -166,7 +166,7 @@ static const struct luaL_Reg torch_TensorOperator_(_) [] = {
void torch_TensorOperator_(init)(lua_State *L)
{
luaT_pushmetatable(L, torch_Tensor);
luaL_register(L, NULL, torch_TensorOperator_(_));
luaT_setfuncs(L, torch_TensorOperator_(_), 0);
lua_pop(L, 1);
}

Expand Down
2 changes: 1 addition & 1 deletion init.c
Original file line number Diff line number Diff line change
Expand Up @@ -56,7 +56,7 @@ int luaopen_libtorch(lua_State *L)

lua_newtable(L);
lua_pushvalue(L, -1);
lua_setfield(L, LUA_GLOBALSINDEX, "torch");
lua_setglobal(L, "torch");

torch_File_init(L);

Expand Down
3 changes: 3 additions & 0 deletions init.lua
Original file line number Diff line number Diff line change
Expand Up @@ -6,6 +6,9 @@
-- Cross-version aliases so the same sources run on Lua 5.1 and 5.2:
-- string.gfind is the pre-5.1 name of string.gmatch, and table.unpack is
-- where Lua 5.2 moved the global unpack.  Each assignment is a no-op on
-- versions where the name already exists.
string.gfind = string.gfind or string.gmatch
table.unpack = table.unpack or unpack

require "paths"
paths.require "libtorch"
Expand Down
38 changes: 29 additions & 9 deletions lib/luaT/luaT.c
Original file line number Diff line number Diff line change
Expand Up @@ -45,6 +45,24 @@ void luaT_free(lua_State *L, void *ptr)
free(ptr);
}

/* Portable replacement for luaL_setfuncs: registers every function in the
 * NULL-terminated array `l` into the table at the top of the stack, with
 * the `nup` values above that table shared as upvalues by all of them.
 * Lua 5.2 provides luaL_setfuncs directly; for 5.1 we re-implement it
 * with the same stack contract (pops the nup upvalues, leaves the table). */
void luaT_setfuncs(lua_State *L, const luaL_Reg *l, int nup)
{
#if LUA_VERSION_NUM == 501
luaL_checkstack(L, nup+1, "too many upvalues");
for (; l->name != NULL; l++) { /* fill the table with given functions */
int i;
lua_pushstring(L, l->name);
for (i = 0; i < nup; i++) /* copy upvalues to the top */
lua_pushvalue(L, -(nup+1));
lua_pushcclosure(L, l->func, nup); /* closure with those upvalues */
lua_settable(L, -(nup + 3)); /* table sits below the name, the closure slot, and the nup original upvalues */
}
lua_pop(L, nup); /* remove upvalues */
#else
luaL_setfuncs(L, l, nup);
#endif
}

void luaT_stackdump(lua_State *L)
{
int i;
Expand Down Expand Up @@ -159,8 +177,8 @@ const char *luaT_typenameid(lua_State *L, const char *tname)
}

static const char cdataname[] = ""
"local _, ffi = pcall(require, 'ffi')\n"
"if ffi then\n"
"local ok, ffi = pcall(require, 'ffi')\n"
"if ok then\n"
" local id2name = {}\n"
" return function(cdata, name)\n"
" local id\n"
Expand Down Expand Up @@ -215,8 +233,8 @@ static const char* luaT_cdataname(lua_State *L, int ud, const char *tname)

static void* CDATA_MT_KEY = &CDATA_MT_KEY;
static const char cdatamt[] = ""
"local _, ffi = pcall(require, 'ffi')\n"
"if ffi and not jit then\n"
"local ok, ffi = pcall(require, 'ffi')\n"
"if ok and not jit then\n"
" return ffi.debug().cdata_mt\n"
"else\n"
" return {}\n"
Expand Down Expand Up @@ -448,7 +466,7 @@ void luaT_registeratname(lua_State *L, const struct luaL_Reg *methods, const cha
lua_rawget(L, idx);
}

luaL_register(L, NULL, methods);
luaT_setfuncs(L, methods, 0);
lua_pop(L, 1);
}

Expand Down Expand Up @@ -494,9 +512,9 @@ int luaT_lua_newmetatable(lua_State *L)
luaL_argcheck(L, lua_isnoneornil(L, 5) || lua_isfunction(L, 5), 5, "factory function or nil expected");

if(is_in_module)
lua_getfield(L, LUA_GLOBALSINDEX, module_name);
lua_getglobal(L, module_name);
else
lua_pushvalue(L, LUA_GLOBALSINDEX);
lua_pushglobaltable(L);
if(!lua_istable(L, 6))
luaL_error(L, "while creating metatable %s: bad argument #1 (%s is an invalid module name)", tname, module_name);

Expand Down Expand Up @@ -853,15 +871,17 @@ int luaT_lua_setenv(lua_State *L)
if(!lua_isfunction(L, 1) && !lua_isuserdata(L, 1))
luaL_typerror(L, 1, "function or userdata");
luaL_checktype(L, 2, LUA_TTABLE);
lua_setfenv(L, 1);
lua_setuservalue(L, 1);
return 0;
}

int luaT_lua_getenv(lua_State *L)
{
if(!lua_isfunction(L, 1) && !lua_isuserdata(L, 1))
luaL_typerror(L, 1, "function or userdata");
lua_getfenv(L, 1);
lua_getuservalue(L, 1);
if (lua_isnil(L, -1))
lua_newtable(L);
return 1;
}

Expand Down
14 changes: 14 additions & 0 deletions lib/luaT/luaT.h
Original file line number Diff line number Diff line change
Expand Up @@ -32,13 +32,27 @@ extern "C" {
# define LUAT_API LUA_EXTERNC
#endif

/* Cross-version compatibility shims.
 * On Lua 5.1: express the 5.2 global-table/uservalue API in terms of
 * LUA_GLOBALSINDEX and function environments (setfenv/getfenv).
 * On Lua 5.2+: restore names removed from the core and auxlib
 * (lua_objlen, luaL_typerror). */
#if LUA_VERSION_NUM == 501
# define lua_pushglobaltable(L) lua_pushvalue(L, LUA_GLOBALSINDEX)
# define lua_setuservalue lua_setfenv
# define lua_getuservalue lua_getfenv
#else
# define lua_objlen lua_rawlen
/* luaL_typerror was dropped in 5.2; rebuild its "<type> expected" error.
 * NOTE(review): a plain `static` function in a shared header produces
 * unused-function warnings in translation units that never call it —
 * consider `static inline` if every supported compiler accepts it. */
static int luaL_typerror(lua_State *L, int narg, const char *tname)
{
return luaL_error(L, "%s expected, got %s", tname, luaL_typename(L, narg));
}
#endif


/* C functions */

LUAT_API void* luaT_alloc(lua_State *L, long size);
LUAT_API void* luaT_realloc(lua_State *L, void *ptr, long size);
LUAT_API void luaT_free(lua_State *L, void *ptr);

LUAT_API void luaT_setfuncs(lua_State *L, const luaL_Reg *l, int nup);

LUAT_API const char* luaT_newmetatable(lua_State *L, const char *tname, const char *parenttname,
lua_CFunction constructor, lua_CFunction destructor, lua_CFunction factory);

Expand Down
2 changes: 1 addition & 1 deletion random.lua
Original file line number Diff line number Diff line change
Expand Up @@ -46,7 +46,7 @@ void torch_random_init(lua_State *L)
torch_Generator_init(L);
torch_Generator_new(L);
lua_setfield(L, -2, "_gen");
luaL_register(L, NULL, random__);
luaT_setfuncs(L, random__, 0);
}
]])

Expand Down
4 changes: 4 additions & 0 deletions test/test.lua
Original file line number Diff line number Diff line change
Expand Up @@ -5,6 +5,10 @@ local torchtest = {}
local msize = 100
local precision

-- Lua 5.2 compatibility: alias the 5.1 names to their 5.2 replacements
-- (loadstring -> load, global unpack -> table.unpack) so the test suite
-- runs unmodified on both versions.
local loadstring = loadstring or load
local unpack = unpack or table.unpack

local function maxdiff(x,y)
local d = x-y
if x:type() == 'torch.DoubleTensor' or x:type() == 'torch.FloatTensor' then
Expand Down
7 changes: 4 additions & 3 deletions test/timeSort.lua
Original file line number Diff line number Diff line change
Expand Up @@ -10,7 +10,8 @@ cmd:option('-r', 20, 'Number of repetitions')

local options = cmd:parse(arg or {})
function main()
local pow10 = torch.linspace(1,math.log10(options.N), options.p)
local log10 = math.log10 or function(x) return math.log(x, 10) end
local pow10 = torch.linspace(1,log10(options.N), options.p)
local num_sizes = options.p
local num_reps = options.r

Expand Down Expand Up @@ -128,11 +129,11 @@ function main()
gnuplot.xlabel('N')
gnuplot.ylabel('Speed-up Factor (s)')
gnuplot.figprint('benchmarkRatio.png')

torch.save('benchmark.t7', {
new_rnd=new_rnd,
new_srt=new_srt,
new_cst=new_cst,
new_cst=new_cst,
old_rnd=old_rnd,
old_srt=old_srt,
old_cst=old_cst,
Expand Down
4 changes: 2 additions & 2 deletions utils.c
Original file line number Diff line number Diff line change
Expand Up @@ -113,7 +113,7 @@ static int torch_lua_getdefaulttensortype(lua_State *L)

const char* torch_getdefaulttensortype(lua_State *L)
{
lua_getfield(L, LUA_GLOBALSINDEX, "torch");
lua_getglobal(L, "torch");
if(lua_istable(L, -1))
{
lua_getfield(L, -1, "Tensor");
Expand Down Expand Up @@ -214,5 +214,5 @@ static const struct luaL_Reg torch_utils__ [] = {

void torch_utils_init(lua_State *L)
{
luaL_register(L, NULL, torch_utils__);
luaT_setfuncs(L, torch_utils__, 0);
}

0 comments on commit 18fb209

Please sign in to comment.