Compatible Android

This commit is contained in:
Andros Fenollosa
2016-11-03 00:05:36 +01:00
parent 7cb6af1390
commit 8ec8327e5e
1793 changed files with 440698 additions and 7 deletions

View File

@ -0,0 +1,636 @@
-------------------------------------------------------------------------------
-- Copas - Coroutine Oriented Portable Asynchronous Services
--
-- A dispatcher based on coroutines that can be used by TCP/IP servers.
-- Uses LuaSocket as the interface with the TCP/IP stack.
--
-- Authors: Andre Carregal, Javier Guerra, and Fabio Mascarenhas
-- Contributors: Diego Nehab, Mike Pall, David Burgess, Leonardo Godinho,
-- Thomas Harning Jr., and Gary NG
--
-- Copyright 2005 - Kepler Project (www.keplerproject.org)
--
-- $Id: copas.lua,v 1.37 2009/04/07 22:09:52 carregal Exp $
-------------------------------------------------------------------------------
-- Copas must be loaded before socket.http so that the socket.protect /
-- socket.newtry redefinitions below are picked up by it.
if package.loaded["socket.http"] then
error("you must require copas before require'ing socket.http")
end
local socket = require "socket"
local gettime = socket.gettime -- cached: called on every blocking retry below
local coxpcall = require "coxpcall"
local WATCH_DOG_TIMEOUT = 120 -- seconds before a stuck socket is force-ticked
local UDP_DATAGRAM_MAX = 8192 -- default maximum UDP datagram size to read
-- Redefines LuaSocket functions with coroutine safe versions
-- (this allows the use of socket.http from within copas)
-- Unwraps the result of a protected call: on success passes all values
-- through; on failure converts a table-wrapped error into the
-- (nil, message) convention and re-raises any other error value.
local function statusHandler(status, ...)
  if status then
    return ...
  end
  local err = ...
  if type(err) ~= "table" then
    error(err)
  end
  return nil, err[1]
end
-- Replacement for LuaSocket's socket.protect: wraps 'func' so that raised
-- errors are converted (coroutine-safely, via coxpcall) into the
-- (nil, message) return convention.
function socket.protect(func)
  return function (...)
    return statusHandler(coxpcall.pcall(func, ...))
  end
end
-- Replacement for LuaSocket's socket.newtry: builds a "try" function that
-- passes successful results through; on failure it runs 'finalizer'
-- (protected) and raises a table-wrapped error (level 0, no position info)
-- for socket.protect/statusHandler to unwrap.
function socket.newtry(finalizer)
  return function (...)
    if (...) then
      return ...
    end
    coxpcall.pcall(finalizer, select(2, ...))
    error({ (select(2, ...)) }, 0)
  end
end
-- end of LuaSocket redefinitions
-- Copas module table. Meta information is public even if beginning with "_".
local copas = {
  _COPYRIGHT   = "Copyright (C) 2005-2010 Kepler Project",
  _DESCRIPTION = "Coroutine Oriented Portable Asynchronous Services",
  _VERSION     = "Copas 1.2.1",
  -- close the socket of a connection once its handler finishes
  autoclose    = true,
}
-------------------------------------------------------------------------------
-- Simple set implementation based on LuaSocket's tinyirc.lua example.
-- The set keeps O(1) insert/remove via a reverse index, and additionally
-- maintains a FIFO queue per key (used to queue coroutines per socket).
-------------------------------------------------------------------------------
local function newset()
  local reverse = {}
  local set = {}
  local queues = {}
  setmetatable(set, { __index = {
    -- add 'value' unless it is already present
    insert = function(set, value)
      if reverse[value] then return end
      set[#set + 1] = value
      reverse[value] = #set
    end,
    -- delete 'value' by moving the last element into its slot
    remove = function(set, value)
      local index = reverse[value]
      if not index then return end
      reverse[value] = nil
      local top = set[#set]
      set[#set] = nil
      if top ~= value then
        reverse[top] = index
        set[index] = top
      end
    end,
    -- append 'itm' to the FIFO queue attached to 'key'
    push = function (set, key, itm)
      local fifo = queues[key]
      if fifo == nil then
        queues[key] = { itm }
      else
        fifo[#fifo + 1] = itm
      end
    end,
    -- dequeue the oldest item for 'key'; drops the queue once emptied
    pop = function (set, key)
      local fifo = queues[key]
      if fifo == nil then return end
      local ret = table.remove(fifo, 1)
      if fifo[1] == nil then
        queues[key] = nil
      end
      return ret
    end
  }})
  return set
end
local fnil = function() end

-- Queue of coroutines sleeping on a timer, plus a set of coroutines
-- sleeping indefinitely until copas.wakeup() is called on them.
local _sleeping = {
  times = {},    -- ascending list of wake-up times
  cos = {},      -- coroutines; index matches the 'times' list
  lethargy = {}, -- set of coroutines sleeping without a wakeup time
  -- no-op stubs so _sleeping can stand in where a socket set is expected
  insert = fnil,
  remove = fnil,

  -- Schedules 'co' to wake after 'sleeptime' seconds; a negative value
  -- parks it in lethargy until an explicit copas.wakeup().
  push = function(self, sleeptime, co)
    if not co then return end
    if sleeptime < 0 then
      self.lethargy[co] = true
      return
    end
    local waketime = gettime() + sleeptime
    local t, c = self.times, self.cos
    local i, count = 1, #t
    -- linear scan keeps 'times' sorted; TODO: do a binary search
    while i <= count and t[i] <= waketime do i = i + 1 end
    table.insert(t, i, waketime)
    table.insert(c, i, co)
  end,

  -- Returns seconds until the next sleep expires (never negative),
  -- or nil when nothing is scheduled.
  getnext = function(self)
    local first = self.times[1]
    if not first then return nil end
    return math.max(first - gettime(), 0)
  end,

  -- Pops the first coroutine whose wake-up time is due at 'time', if any.
  pop = function(self, time)
    local t, c = self.times, self.cos
    if #t == 0 or time < t[1] then return end
    local co = c[1]
    table.remove(t, 1)
    table.remove(c, 1)
    return co
  end,

  -- Wakes 'co': lifts it out of lethargy or cancels its pending timer,
  -- then reschedules it to run immediately.
  -- (fixed: dropped an unused local that captured the removed time)
  wakeup = function(self, co)
    local let = self.lethargy
    if let[co] then
      let[co] = nil
      self:push(0, co)
      return
    end
    local cos = self.cos
    for i = 1, #cos do
      if cos[i] == co then
        table.remove(cos, i)
        table.remove(self.times, i)
        self:push(0, co)
        return
      end
    end
  end,
} --_sleeping
local _servers = newset() -- servers being handled
local _reading_log = {} -- socket -> timestamp of last blocked read (watchdog)
local _writing_log = {} -- socket -> timestamp of last blocked write (watchdog)
local _reading = newset() -- sockets currently being read
local _writing = newset() -- sockets currently being written
-------------------------------------------------------------------------------
-- Coroutine based socket I/O functions.
-------------------------------------------------------------------------------
-- Reads 'pattern' from 'client'; on a timeout the coroutine yields to the
-- reading set and retries, passing the partial result back in.
-- UDP: a UDP socket expects a second argument to be a number, so it MUST
-- be provided as the 'pattern' below defaults to a string. Will throw a
-- 'bad argument' error if omitted.
function copas.receive(client, pattern, part)
  pattern = pattern or "*l"
  while true do
    local s, err
    s, err, part = client:receive(pattern, part)
    if s or err ~= "timeout" then
      _reading_log[client] = nil
      return s, err, part
    end
    _reading_log[client] = gettime()
    coroutine.yield(client, _reading)
  end
end
-- Receives a datagram of up to 'size' bytes from a UDP socket; not
-- available for TCP. On success the second return value holds the
-- sender's ip address, the third the port. Yields to the reading set
-- on timeouts, just like copas.receive().
function copas.receivefrom(client, size)
  size = size or UDP_DATAGRAM_MAX
  while true do
    local s, err, port = client:receivefrom(size) -- upon success err holds ip address
    if s or err ~= "timeout" then
      _reading_log[client] = nil
      return s, err, port
    end
    _reading_log[client] = gettime()
    coroutine.yield(client, _reading)
  end
end
-- Like copas.receive(), but when reading a fixed number of bytes it
-- returns as soon as ANY data has arrived instead of waiting for all of it.
function copas.receivePartial(client, pattern, part)
  pattern = pattern or "*l"
  while true do
    local s, err
    s, err, part = client:receive(pattern, part)
    local got_chunk = (type(pattern) == "number") and part ~= "" and part ~= nil
    if s or got_chunk or err ~= "timeout" then
      _reading_log[client] = nil
      return s, err, part
    end
    _reading_log[client] = gettime()
    coroutine.yield(client, _reading)
  end
end
-- Sends data to a client; the operation yields to the writing set on
-- timeouts and resumes from the last byte sent.
-- Note: 'from' and 'to' are byte indices into 'data' and are ignored
-- by/for UDP sockets. Returns the same values as LuaSocket's send().
-- (fixed: removed an unused local 'sent')
function copas.send(client, data, from, to)
  local s, err
  from = from or 1
  local lastIndex = from - 1
  repeat
    s, err, lastIndex = client:send(data, lastIndex + 1, to)
    -- Randomly insert an extra coroutine swap so a high-throughput sender
    -- cannot starve the other threads.
    if (math.random(100) > 90) then
      _writing_log[client] = gettime()
      coroutine.yield(client, _writing)
    end
    if s or err ~= "timeout" then
      _writing_log[client] = nil
      return s, err, lastIndex
    end
    _writing_log[client] = gettime()
    coroutine.yield(client, _writing)
  until false
end
-- Sends a datagram to ip:port over a UDP socket; not available for TCP.
-- Yields to the writing set on timeouts. Returns the same values as
-- LuaSocket's sendto(). (fixed: removed an unused local 'sent')
function copas.sendto(client, data, ip, port)
  local s, err
  repeat
    s, err = client:sendto(data, ip, port)
    -- Randomly insert an extra coroutine swap so a high-throughput sender
    -- cannot starve the other threads.
    if (math.random(100) > 90) then
      _writing_log[client] = gettime()
      coroutine.yield(client, _writing)
    end
    if s or err ~= "timeout" then
      _writing_log[client] = nil
      return s, err
    end
    _writing_log[client] = gettime()
    coroutine.yield(client, _writing)
  until false
end
-- Waits (non-blocking) until a connection attempt on 'skt' completes,
-- yielding to the writing set while the connect is in progress.
-- Returns the same values as LuaSocket's connect().
-- (fixed: removed an unreachable 'return' that followed the endless loop)
function copas.connect(skt, host, port)
  skt:settimeout(0)
  local ret, err
  repeat
    ret, err = skt:connect (host, port)
    if ret or err ~= "timeout" then
      _writing_log[skt] = nil
      return ret, err
    end
    _writing_log[skt] = gettime()
    coroutine.yield(skt, _writing)
  until false
end
-- Flushes a client write buffer (deprecated). Copas does not buffer writes,
-- so this is intentionally a no-op kept for LuaSocket API compatibility.
function copas.flush(client)
end
-- wraps a TCP socket to use Copas methods (send, receive, flush and settimeout):
-- each standard LuaSocket method name delegates to its coroutine-friendly
-- Copas counterpart on the wrapped raw socket.
local _skt_mt = {__index = {
  send = function (self, data, from, to)
    return copas.send (self.socket, data, from, to)
  end,
  -- a timeout of 0 means "return whatever is available" -> partial reads
  receive = function (self, pattern, prefix)
    if (self.timeout==0) then
      return copas.receivePartial(self.socket, pattern, prefix)
    end
    return copas.receive(self.socket, pattern, prefix)
  end,
  flush = function (self)
    return copas.flush(self.socket)
  end,
  -- only records the timeout; the yielding loops provide the waiting
  settimeout = function (self,time)
    self.timeout=time
    return true
  end,
  skip = function(self, ...) return self.socket:skip(...) end,
  close = function(self, ...) return self.socket:close(...) end,
}}
-- wraps a UDP socket, copy of TCP one adapted for UDP.
-- Mainly adds sendto() and receivefrom()
local _skt_mt_udp = {__index = {
  send = function (self, data)
    return copas.send (self.socket, data)
  end,
  -- UDP-only: yielding wrapper around sendto()
  sendto = function (self, data, ip, port)
    return copas.sendto (self.socket, data, ip, port)
  end,
  -- datagrams are atomic; reads are capped at UDP_DATAGRAM_MAX bytes
  receive = function (self, size)
    return copas.receive (self.socket, (size or UDP_DATAGRAM_MAX))
  end,
  -- UDP-only: yielding wrapper around receivefrom()
  receivefrom = function (self, size)
    return copas.receivefrom (self.socket, (size or UDP_DATAGRAM_MAX))
  end,
  flush = function (self)
    return copas.flush (self.socket)
  end,
  -- only records the timeout; the yielding loops provide the waiting
  settimeout = function (self,time)
    self.timeout=time
    return true
  end,
}}
-- Wraps a LuaSocket socket in a Copas-aware object: UDP sockets (detected
-- via their tostring() prefix) get the UDP wrapper with sendto/receivefrom,
-- everything else the TCP wrapper.
function copas.wrap (skt)
  local mt = tostring(skt):sub(1, 3) == "udp" and _skt_mt_udp or _skt_mt
  return setmetatable({ socket = skt }, mt)
end
--------------------------------------------------
-- Error handling
--------------------------------------------------
local _errhandlers = {} -- error handler per coroutine

-- Registers 'err' as the error handler for the coroutine calling this.
-- Has no effect when invoked from the main thread.
function copas.setErrorHandler (err)
  local co = coroutine.running()
  if not co then return end
  _errhandlers[co] = err
end

-- Default error handler: print the message, coroutine and socket.
local function _deferror (msg, co, skt)
  print (msg, co, skt)
end
-------------------------------------------------------------------------------
-- Thread handling
-------------------------------------------------------------------------------
-- Resumes coroutine 'co' one step. A still-running handler yields
-- (socket, set-to-wait-on): the socket is then inserted into that set and
-- the coroutine queued behind it. Anything else means the coroutine
-- finished or errored: on error the per-coroutine handler (or _deferror)
-- is invoked coroutine-safely, and the socket is closed when
-- copas.autoclose is enabled.
local function _doTick (co, skt, ...)
  if not co then return end
  local ok, res, new_q = coroutine.resume(co, skt, ...)
  if ok and res and new_q then
    new_q:insert (res)
    new_q:push (res, co)
  else
    if not ok then coxpcall.pcall (_errhandlers [co] or _deferror, res, co, skt) end
    if skt and copas.autoclose then skt:close() end
    _errhandlers [co] = nil
  end
end
-- Accepts a pending connection on server socket 'input', makes the client
-- non-blocking, and starts a fresh handler coroutine for it.
-- Returns the accepted client socket (or nil when none was pending).
local function _accept(input, handler)
  local client = input:accept()
  if not client then return nil end
  client:settimeout(0)
  _doTick(coroutine.create(handler), client)
  return client
end
-- handle threads on a queue
-- Resumes the next coroutine queued to read on 'skt'
local function _tickRead (skt)
  _doTick (_reading:pop (skt), skt)
end
-- Resumes the next coroutine queued to write on 'skt'
local function _tickWrite (skt)
  _doTick (_writing:pop (skt), skt)
end
-------------------------------------------------------------------------------
-- Adds a server/handler pair to Copas dispatcher
-------------------------------------------------------------------------------
-- TCP: handler coroutines are created lazily, one per accepted client
local function addTCPserver(server, handler, timeout)
  server:settimeout(timeout or 0.1)
  _servers[server] = handler
  _reading:insert(server)
end
-- UDP: connectionless, so one handler coroutine serves the whole socket
local function addUDPserver(server, handler, timeout)
  server:settimeout(timeout or 0)
  local co = coroutine.create(handler)
  _reading:insert(server)
  _doTick (co, server)
end
-- Registers a server socket and its handler with the dispatcher, picking
-- the UDP or TCP registration path based on the socket's type.
function copas.addserver(server, handler, timeout)
  local is_udp = tostring(server):sub(1, 3) == "udp"
  if is_udp then
    addUDPserver(server, handler, timeout)
  else
    addTCPserver(server, handler, timeout)
  end
end
-- Unregisters 'server' from the dispatcher and closes it.
-- Returns the result of server:close().
function copas.removeserver(server)
  _servers[server] = nil
  _reading:remove(server)
  return server:close()
end
-------------------------------------------------------------------------------
-- Adds a new coroutine thread to the Copas dispatcher
-------------------------------------------------------------------------------
-- Accepts either a function (wrapped into a coroutine) or an existing
-- coroutine, runs its first step immediately, and returns the coroutine.
function copas.addthread(thread, ...)
  local co = type(thread) == "thread" and thread or coroutine.create(thread)
  _doTick(co, nil, ...)
  return co
end
-------------------------------------------------------------------------------
-- tasks registering
-------------------------------------------------------------------------------
local _tasks = {} -- set of registered dispatcher tasks
-- Registers a task whose events are handled by the default read tick
local function addtaskRead (tsk)
  -- lets tasks call the default _tick()
  tsk.def_tick = _tickRead
  _tasks [tsk] = true
end
-- Registers a task whose events are handled by the default write tick
local function addtaskWrite (tsk)
  -- lets tasks call the default _tick()
  tsk.def_tick = _tickWrite
  _tasks [tsk] = true
end
-- Iterator factory over all registered tasks (for use in a generic for)
local function tasks ()
  return next, _tasks
end
-------------------------------------------------------------------------------
-- main tasks: manage readable and writable socket sets
-------------------------------------------------------------------------------
-- a task to check ready to read events
local _readable_t = {
  -- Iterator over the readable sockets found by the last select()
  -- (self._evs is filled in by _select below)
  events = function(self)
    local i = 0
    return function ()
      i = i + 1
      return self._evs [i]
    end
  end,
  -- Server sockets accept a new client; client sockets resume their reader
  tick = function (self, input)
    local handler = _servers[input]
    if handler then
      input = _accept(input, handler)
    else
      _reading:remove (input)
      self.def_tick (input)
    end
  end
}
addtaskRead (_readable_t)
-- a task to check ready to write events
local _writable_t = {
  -- Iterator over the writable sockets found by the last select()
  -- (self._evs is filled in by _select below)
  events = function (self)
    local i = 0
    return function ()
      i = i + 1
      return self._evs [i]
    end
  end,
  -- Resumes the coroutine waiting to write on 'output'
  tick = function (self, output)
    _writing:remove (output)
    self.def_tick (output)
  end
}
addtaskWrite (_writable_t)
--
--sleeping threads task
local _sleeping_t = {
  -- Wakes at most one sleeping coroutine whose wake-up time has expired
  tick = function (self, time, ...)
    _doTick(_sleeping:pop(time), ...)
  end
}
-- yields the current coroutine and wakes it after 'sleeptime' seconds.
-- If sleeptime<0 then it sleeps until explicitly woken up using 'wakeup'
function copas.sleep(sleeptime)
  coroutine.yield((sleeptime or 0), _sleeping)
end
-- Wakes up a sleeping coroutine 'co'.
function copas.wakeup(co)
  _sleeping:wakeup(co)
end
local last_cleansing = 0 -- time of the last watchdog sweep
-------------------------------------------------------------------------------
-- Checks for reads and writes on sockets
-------------------------------------------------------------------------------
-- Runs socket.select over the reading/writing sets, storing the resulting
-- event lists on the reader/writer tasks. Every WATCH_DOG_TIMEOUT seconds,
-- sockets that have been blocked longer than the watchdog limit are
-- injected into the event lists so their coroutines get a chance to run
-- (and typically error out). Returns nil when there is work to do,
-- otherwise the select error (e.g. "timeout").
local function _select (timeout)
  local err
  local now = gettime()
  local duration = function(t2, t1) return t2-t1 end
  _readable_t._evs, _writable_t._evs, err = socket.select(_reading, _writing, timeout)
  local r_evs, w_evs = _readable_t._evs, _writable_t._evs
  if duration(now, last_cleansing) > WATCH_DOG_TIMEOUT then
    last_cleansing = now
    for k,v in pairs(_reading_log) do
      if not r_evs[k] and duration(now, v) > WATCH_DOG_TIMEOUT then
        _reading_log[k] = nil
        -- append stale socket in select's result format (list + reverse map)
        r_evs[#r_evs + 1] = k
        r_evs[k] = #r_evs
      end
    end
    for k,v in pairs(_writing_log) do
      if not w_evs[k] and duration(now, v) > WATCH_DOG_TIMEOUT then
        _writing_log[k] = nil
        w_evs[#w_evs + 1] = k
        w_evs[k] = #w_evs
      end
    end
  end
  -- a timeout with injected watchdog events still counts as work to do
  if err == "timeout" and #r_evs + #w_evs > 0 then
    return nil
  else
    return err
  end
end
-------------------------------------------------------------------------------
-- Dispatcher loop step.
-- Listen to client requests and handles them
-- Returns false if no data was handled (timeout), or true if there was data
-- handled (or nil + error message)
-------------------------------------------------------------------------------
function copas.step(timeout)
  _sleeping_t:tick(gettime())
  -- Need to wake up the select call in time for the next sleeping event
  local nextwait = _sleeping:getnext()
  if nextwait then
    timeout = timeout and math.min(nextwait, timeout) or nextwait
  end
  local err = _select (timeout)
  if err == "timeout" then return false end
  if err then
    error(err)
  end
  -- dispatch every pending event of every registered task
  for tsk in tasks() do
    for ev in tsk:events() do
      tsk:tick (ev)
    end
  end
  return true
end
-------------------------------------------------------------------------------
-- Dispatcher endless loop.
-- Listen to client requests and handles them forever
-------------------------------------------------------------------------------
function copas.loop(timeout)
  repeat
    copas.step(timeout)
  until false
end
return copas

View File

@ -0,0 +1,68 @@
-------------------------------------------------------------------------------
-- Coroutine safe xpcall and pcall versions
--
-- Encapsulates the protected calls with a coroutine based loop, so errors can
-- be dealed without the usual Lua 5.x pcall/xpcall issues with coroutines
-- yielding inside the call to pcall or xpcall.
--
-- Authors: Roberto Ierusalimschy and Andre Carregal
-- Contributors: Thomas Harning Jr., Ignacio Burgueño, Fabio Mascarenhas
--
-- Copyright 2005 - Kepler Project (www.keplerproject.org)
--
-- $Id: coxpcall.lua,v 1.13 2008/05/19 19:20:02 mascarenhas Exp $
-------------------------------------------------------------------------------
-- Lua 5.2 makes this module a no-op: its native pcall/xpcall already allow
-- yields across them, so the plain versions are exported directly.
-- NOTE(review): only the exact string "Lua 5.2" is recognized here; later
-- versions (5.3/5.4) would fall through to the coroutine emulation — confirm
-- whether that is intended.
if _VERSION == "Lua 5.2" then
  copcall = pcall
  coxpcall = xpcall
  return { pcall = pcall, xpcall = xpcall }
end
-------------------------------------------------------------------------------
-- Implements xpcall with coroutines
-------------------------------------------------------------------------------
local performResume, handleReturnValue -- forward declarations (mutually recursive)
local oldpcall, oldxpcall = pcall, xpcall -- keep originals before any patching
local pack = table.pack or function(...) return {n = select("#", ...), ...} end -- 5.1 compat
local unpack = table.unpack or unpack -- 5.2+ / 5.1 compat
-- Handles the values returned by coroutine.resume: on error invokes the
-- user's handler with a traceback; while the coroutine is merely suspended
-- (it yielded), the yield is forwarded outward and the coroutine resumed
-- with whatever comes back; otherwise the final results are returned.
function handleReturnValue(err, co, status, ...)
  if not status then
    return false, err(debug.traceback(co, (...)), ...)
  end
  if coroutine.status(co) == 'suspended' then
    return performResume(err, co, coroutine.yield(...))
  else
    return true, ...
  end
end
-- Resumes 'co' and routes the outcome through handleReturnValue
function performResume(err, co, ...)
  return handleReturnValue(err, co, coroutine.resume(co, ...))
end
-- Coroutine-safe xpcall: runs f(...) inside a fresh coroutine so that f may
-- yield across the protected call. 'err' receives (traceback, original error).
function coxpcall(f, err, ...)
  local res, co = oldpcall(coroutine.create, f)
  if not res then
    -- f is not a plain Lua function (e.g. a C function); wrap it in one
    local params = pack(...)
    local newf = function() return f(unpack(params, 1, params.n)) end
    co = coroutine.create(newf)
  end
  return performResume(err, co, ...)
end
-------------------------------------------------------------------------------
-- Implements pcall with coroutines
-------------------------------------------------------------------------------
-- Identity error handler: drops the traceback, passes the error through
local function id(trace, ...)
  return ...
end
-- Coroutine-safe pcall, built on coxpcall with the identity handler
function copcall(f, ...)
  return coxpcall(f, id, ...)
end
return { pcall = copcall, xpcall = coxpcall }

View File

@ -0,0 +1,112 @@
-- Luadist configuration
-- NOTE(review): module() is deprecated since Lua 5.2; kept because callers
-- rely on the globals it creates in the "dist.config" namespace.
module ("dist.config", package.seeall)
local sys = require "dist.sys"
local utils = require "dist.utils"
-- Windows detection via environment variables, explicitly excluding cygwin
local win = (os.getenv('WINDIR') or (os.getenv('OS') or ''):match('[Ww]indows'))
and not (os.getenv('OSTYPE') or ''):match('cygwin') -- exclude cygwin
-- System information ------------------------------------------------
version = "0.2.7" -- Current LuaDist version
-- set initial architecture as it's important for path separators
arch = win and "Windows" or "Linux" -- Host architecture
type = "x86" -- Host type (note: a module field, not Lua's type())
-- Directories -------------------------------------------------------
root_dir = os.getenv("DIST_ROOT") or utils.get_luadist_location() or sys.path_separator()
temp_dir = "tmp"
cache_dir = sys.make_path(temp_dir, "cache")
distinfos_dir = sys.make_path("share", "luadist-git", "dists")
test_dir = sys.make_path("share", "luadist-git", "test")
-- Files -------------------------------------------------------------
manifest_file = sys.make_path(cache_dir, ".gitmodules")
dep_cache_file = sys.make_path(cache_dir, ".depcache")
log_file = sys.make_path(temp_dir, "luadist.log")
cache_file = ""
-- Repositories ------------------------------------------------------
repos = {
"git://github.com/LuaDist/Repository.git",
}
upload_url = "git@github.com:LuaDist" -- must not contain trailing '/'
-- Settings ----------------------------------------------------------
debug = false -- Use debug mode.
verbose = false -- Print verbose output.
simulate = false -- Only simulate installation of packages.
binary = true -- Use binary version of modules.
source = true -- Use source version of modules.
test = false -- Run CTest before install.
cache = true -- Use cache.
cache_timeout = 3 * 60 * 60 -- Cache timeout in seconds.
dep_cache = true -- Use cache for dependency information (tree functionality).
-- Components (of modules) that will be installed.
components = {
"Runtime", "Library", "Header", "Data", "Documentation", "Example", "Test", "Other", "Unspecified"
}
-- Available log levels are: DEBUG, INFO, WARN, ERROR, FATAL (see dist.logger for more information).
print_log_level = "WARN" -- Minimum level for log messages to be printed (nil to disable).
write_log_level = "INFO" -- Minimum level for log messages to be logged (nil to disable).
-- CMake variables ---------------------------------------------------
variables = {
--- Install defaults
INSTALL_BIN = "bin",
INSTALL_LIB = "lib",
INSTALL_INC = "include",
INSTALL_ETC = "etc",
INSTALL_LMOD = "lib/lua",
INSTALL_CMOD = "lib/lua",
--- LuaDist specific variables
DIST_VERSION = version,
DIST_ARCH = arch,
DIST_TYPE = type,
-- CMake specific setup
CMAKE_GENERATOR = win and "MinGW Makefiles" or "Unix Makefiles",
CMAKE_BUILD_TYPE = "MinSizeRel",
-- RPath functionality
CMAKE_SKIP_BUILD_RPATH = "FALSE",
CMAKE_BUILD_WITH_INSTALL_RPATH = "FALSE",
CMAKE_INSTALL_RPATH = "$ORIGIN/../lib",
CMAKE_INSTALL_RPATH_USE_LINK_PATH = "TRUE",
CMAKE_INSTALL_NAME_DIR = "@executable_path/../lib",
-- OSX specific
CMAKE_OSX_ARCHITECTURES = "",
}
-- Building ----------------------------------------------------------
cmake = "cmake"
ctest = "ctest"
cache_command = cmake .. " -C cache.cmake"
build_command = cmake .. " --build . --clean-first"
install_component_command = " -DCOMPONENT=#COMPONENT# -P cmake_install.cmake"
test_command = ctest .. " -V ."
strip_option = " -DCMAKE_INSTALL_DO_STRIP=true"
cache_debug_options = "-DCMAKE_VERBOSE_MAKEFILE=true -DCMAKE_BUILD_TYPE=Debug"
build_debug_options = ""
-- Add -j option to make in case of unix makefiles to speed up builds
if (variables.CMAKE_GENERATOR == "Unix Makefiles") then
build_command = build_command .. " -- -j6"
end
-- Add -j option to make in case of MinGW makefiles to speed up builds
if (variables.CMAKE_GENERATOR == "MinGW Makefiles") then
build_command = "set SHELL=cmd.exe && " .. build_command .. " -- -j"
end

View File

@ -0,0 +1,271 @@
-- Note: the code of this module is borrowed from the original LuaDist project
--- LuaDist version constraints functions
-- Peter Drahoš, LuaDist Project, 2010
-- Original Code borrowed from LuaRocks Project
--- Version constraints handling functions.
-- Dependencies are represented in LuaDist through strings with
-- a dist name followed by a comma-separated list of constraints.
-- Each constraint consists of an operator and a version number.
-- In this string format, version numbers are represented as
-- naturally as possible, like they are used by upstream projects
-- (e.g. "2.0beta3"). Internally, LuaDist converts them to a purely
-- numeric representation, allowing comparison following some
-- "common sense" heuristics. The precise specification of the
-- comparison criteria is the source code of this module, but the
-- test/test_deps.lua file included with LuaDist provides some
-- insights on what these criteria are.
-- NOTE(review): module() is deprecated since Lua 5.2; kept because callers
-- rely on the globals it creates in the "dist.constraints" namespace.
module ("dist.constraints", package.seeall)
-- Maps every accepted operator spelling to its canonical form
local operators = {
  ["=="] = "==",
  ["~="] = "~=",
  [">"] = ">",
  ["<"] = "<",
  [">="] = ">=",
  ["<="] = "<=",
  ["~>"] = "~>",
  -- plus some convenience translations
  [""] = "==",
  ["-"] = "==",
  ["="] = "==",
  ["!="] = "~="
}
-- Numeric penalties for pre-release words so that e.g. "1.0rc1" < "1.0";
-- more mature stages get smaller (less negative) deltas
local deltas = {
  scm = -100,
  rc = -1000,
  pre = -10000,
  beta = -100000,
  alpha = -1000000,
  work = -10000000,
}
-- Metatable giving parsed version tables relational comparison operators
local version_mt = {
  --- Equality comparison for versions.
  -- All version numbers must be equal.
  -- If both versions have revision numbers, they must be equal;
  -- otherwise the revision number is ignored.
  -- @param v1 table: version table to compare.
  -- @param v2 table: version table to compare.
  -- @return boolean: true if they are considered equivalent.
  __eq = function(v1, v2)
    if #v1 ~= #v2 then
      return false
    end
    for i = 1, #v1 do
      if v1[i] ~= v2[i] then
        return false
      end
    end
    if v1.revision and v2.revision then
      return (v1.revision == v2.revision)
    end
    return true
  end,
  --- Size comparison for versions.
  -- All version numbers are compared.
  -- If both versions have revision numbers, they are compared;
  -- otherwise the revision number is ignored.
  -- @param v1 table: version table to compare.
  -- @param v2 table: version table to compare.
  -- @return boolean: true if v1 is considered lower than v2.
  __lt = function(v1, v2)
    -- missing components compare as 0, so {2} orders like {2,0}
    for i = 1, math.max(#v1, #v2) do
      local v1i, v2i = v1[i] or 0, v2[i] or 0
      if v1i ~= v2i then
        return (v1i < v2i)
      end
    end
    if v1.revision and v2.revision then
      return (v1.revision < v2.revision)
    end
    return false
  end
}
-- Cache of parsed versions; weak keys/values so entries can be collected
local version_cache = {}
setmetatable(version_cache, {
  __mode = "kv"
})
--- Parse a version string, converting to table format.
-- A version table contains all components of the version string
-- converted to numeric format, stored in the array part of the table.
-- If the version contains a revision, it is stored numerically
-- in the 'revision' field. The original (trimmed) string representation
-- is preserved in the 'string' field.
-- Returned version tables use a metatable
-- allowing later comparison through relational operators.
-- @param vstring string: A version number in string format.
-- @return table or nil: A version table or nil
-- if the input string contains invalid characters.
function parseVersion(vstring)
  if not vstring then return nil end
  assert(type(vstring) == "string")
  local cached = version_cache[vstring]
  if cached then
    return cached
  end
  local version = {}
  local i = 1
  local function add_token(number)
    -- merge with the previous component when no separator intervened
    version[i] = version[i] and version[i] + number/100000 or number
    i = i + 1
  end
  -- trim leading and trailing spaces
  -- (fixed: the greedy '(.*)%s*$' never trimmed trailing spaces; the lazy
  -- '(.-)' pattern does)
  vstring = vstring:match("^%s*(.-)%s*$")
  version.string = vstring
  -- store revision separately if any
  local main, revision = vstring:match("(.*)%-(%d+)$")
  if revision then
    vstring = main
    version.revision = tonumber(revision)
  end
  while #vstring > 0 do
    -- extract a number
    local token, rest = vstring:match("^(%d+)[%.%-%_]*(.*)")
    if token then
      add_token(tonumber(token))
    else
      -- extract a word; known pre-release words map to negative deltas,
      -- anything else falls back to its first byte scaled down
      token, rest = vstring:match("^(%a+)[%.%-%_]*(.*)")
      if not token then
        return nil
      end
      version[i] = deltas[token] or (token:byte() / 1000)
    end
    vstring = rest
  end
  setmetatable(version, version_mt)
  -- (fixed: the loop above consumes 'vstring' down to "", so the original
  -- cached every result under the empty-string key; cache under the
  -- trimmed input instead. Also removed an unused local 'last'.)
  version_cache[version.string] = version
  return version
end
--- Utility function to compare version numbers given as strings.
-- @param a string: one version.
-- @param b string: another version.
-- @return boolean: True if a > b.
function compareVersions(a, b)
  local va, vb = parseVersion(a), parseVersion(b)
  return va > vb
end
--- Consumes one constraint from the front of a string.
-- For example, ">= 1.0, > 2.0" yields {op = ">=", version = {1,0}} and the
-- rest, "> 2.0", is returned back to the caller.
-- @param input string: A list of constraints in string format.
-- @return (table, string) or nil: A table representing the first
-- constraint plus the unused input, or nil if the input is invalid.
local function parseConstraint(input)
  assert(type(input) == "string")
  local op, version, rest = input:match("^([<>=~!]*)%s*([%w%.%_%-]+)[%s,]*(.*)")
  local parsed_op = operators[op]
  local parsed_version = parseVersion(version)
  if not (parsed_op and parsed_version) then return nil end
  return { op = parsed_op, version = parsed_version }, rest
end
--- Convert a comma-separated constraint string into a list of constraint
-- tables, e.g. ">= 1.0, < 2.0" becomes
-- {{op = ">=", version={1,0}}, {op = "<", version={2,0}}}.
-- Version tables carry a metatable enabling relational comparison.
-- @param input string: A list of constraints in string format.
-- @return table or nil: A table representing the same constraints,
-- or nil if the input string is invalid.
function parseConstraints(input)
  assert(type(input) == "string")
  local constraints = {}
  while #input > 0 do
    local constraint
    constraint, input = parseConstraint(input)
    if not constraint then
      return nil
    end
    table.insert(constraints, constraint)
  end
  return constraints
end
--- A more lenient check for equivalence between versions.
-- True when every component present in 'requested' matches 'version',
-- ignoring components not given. For example, when requesting "2",
-- then "2", "2.1", "2.3.5-9"... all match. When requesting "2.1",
-- then "2.1", "2.1.3" match, but "2.2" doesn't.
-- @param version string or table: Version to be tested; may be
-- in string format or already parsed into a table.
-- @param requested string or table: Version requested; may be
-- in string format or already parsed into a table.
-- @return boolean: True if the tested version matches the requested
-- version, false otherwise.
local function partialMatch(version, requested)
  assert(type(version) == "string" or type(version) == "table")
  -- (fixed: the original asserted type(version) twice instead of
  -- validating 'requested')
  assert(type(requested) == "string" or type(requested) == "table")
  if type(version) ~= "table" then version = parseVersion(version) end
  if type(requested) ~= "table" then requested = parseVersion(requested) end
  if not version or not requested then return false end
  for i = 1, #requested do
    if requested[i] ~= version[i] then return false end
  end
  if requested.revision then
    return requested.revision == version.revision
  end
  return true
end
--- Check if a version satisfies a set of constraints.
-- @param version table: A version in table format.
-- @param constraints table: An array of constraints in table format.
-- @return boolean: True if version satisfies all constraints,
-- false otherwise. Unrecognized operators leave the previous verdict
-- untouched (so they are effectively skipped).
function matchConstraints(version, constraints)
  assert(type(version) == "table")
  assert(type(constraints) == "table")
  local ok = true
  setmetatable(version, version_mt)
  for _, constr in pairs(constraints) do
    local wanted = constr.version
    setmetatable(wanted, version_mt)
    local op = constr.op
    if op == "==" then ok = version == wanted
    elseif op == "~=" then ok = version ~= wanted
    elseif op == ">" then ok = version > wanted
    elseif op == "<" then ok = version < wanted
    elseif op == ">=" then ok = version >= wanted
    elseif op == "<=" then ok = version <= wanted
    elseif op == "~>" then ok = partialMatch(version, wanted)
    end
    if not ok then break end
  end
  return ok
end
--- Check if a version string is satisfied by a constraint string.
-- @param version string: A version in string format.
-- @param constraints string: Constraints in string format.
-- @return boolean or (nil, string): the match verdict, or nil plus an
-- error message when either argument fails to parse.
function constraint_satisfied(version, constraints)
  local parsed_constraints = parseConstraints(constraints)
  local parsed_version = parseVersion(version)
  if not (parsed_constraints and parsed_version) then
    return nil, "Error parsing versions."
  end
  return matchConstraints(parsed_version, parsed_constraints)
end

View File

@ -0,0 +1,770 @@
-- Utility functions for dependencies
module ("dist.depends", package.seeall)
local cfg = require "dist.config"
local mf = require "dist.manifest"
local sys = require "dist.sys"
local const = require "dist.constraints"
local utils = require "dist.utils"
local package = require "dist.package"
-- Return all packages with specified names from manifest.
-- Names can also contain version constraint (e.g. 'copas>=1.2.3', 'saci-1.0' etc.).
function find_packages(package_names, manifest)
if type(package_names) == "string" then package_names = {package_names} end
manifest = manifest or mf.get_manifest()
assert(type(package_names) == "table", "depends.find_packages: Argument 'package_names' is not a table or string.")
assert(type(manifest) == "table", "depends.find_packages: Argument 'manifest' is not a table.")
local packages_found = {}
-- find matching packages in manifest
for _, pkg_to_find in pairs(package_names) do
local pkg_name, pkg_constraint = split_name_constraint(pkg_to_find)
pkg_name = utils.escape_magic(pkg_name):gsub("%%%*",".*")
for _, repo_pkg in pairs(manifest) do
if string.match(repo_pkg.name, "^" .. pkg_name .. "$") and (not pkg_constraint or satisfies_constraint(repo_pkg.version, pkg_constraint)) then
table.insert(packages_found, repo_pkg)
end
end
end
return packages_found
end
-- Return manifest consisting of packages installed in specified deploy_dir directory.
-- Loads every dist-info file found under <deploy_dir>/<cfg.distinfos_dir>;
-- returns an empty table when that directory does not exist.
function get_installed(deploy_dir)
  deploy_dir = deploy_dir or cfg.root_dir
  assert(type(deploy_dir) == "string", "depends.get_installed: Argument 'deploy_dir' is not a string.")
  deploy_dir = sys.abs_path(deploy_dir)
  local distinfos_path = sys.make_path(deploy_dir, cfg.distinfos_dir)
  local manifest = {}
  if not sys.is_dir(distinfos_path) then return {} end
  -- from all directories of packages installed in deploy_dir
  for dir in sys.get_directory(distinfos_path) do
    if dir ~= "." and dir ~= ".." and sys.is_dir(sys.make_path(distinfos_path, dir)) then
      local pkg_dist_dir = sys.make_path(distinfos_path, dir)
      -- load the dist.info file
      for file in sys.get_directory(pkg_dist_dir) do
        local pkg_dist_file = sys.make_path(pkg_dist_dir, file)
        if sys.is_file(pkg_dist_file) then
          table.insert(manifest, mf.load_distinfo(pkg_dist_file))
        end
      end
    end
  end
  return manifest
end
-- Return "selected" when 'pkg.selected' == true, otherwise "installed".
-- Used in error messages.
local function selected_or_installed(pkg)
  assert(type(pkg) == "table", "depends.selected_or_installed: Argument 'pkg' is not a table.")
  -- 'and/or' ternary is safe here: both branches are truthy strings
  return (pkg.selected == true) and "selected" or "installed"
end
-- Return whether the 'package_name' is installed according to the manifest 'installed_pkgs'
-- If optional 'version_wanted' constraint is specified, then installed packages must
-- also satisfy specified version constraint.
-- If package is installed but doesn't satisfy version constraint, error message
-- is returned as the second value.
function is_installed(package_name, installed_pkgs, version_wanted)
  assert(type(package_name) == "string", "depends.is_installed: Argument 'package_name' is not a string.")
  assert(type(installed_pkgs) == "table", "depends.is_installed: Argument 'installed_pkgs' is not a table.")
  assert(type(version_wanted) == "string" or type(version_wanted) == "nil", "depends.is_installed: Argument 'version_wanted' is not a string or nil.")
  -- the first installed package with a matching name decides the result
  for _, installed_pkg in pairs(installed_pkgs) do
    if installed_pkg.name == package_name then
      if not version_wanted or satisfies_constraint(installed_pkg.version, version_wanted) then
        -- installed in a satisfying version
        return true
      end
      -- installed, but in a version that violates the wanted constraint
      return false, "Package '" .. package_name .. (version_wanted and " " .. version_wanted or "") .. "' needed, but " .. selected_or_installed(installed_pkg) .. " at version '" .. installed_pkg.version .. "'."
    end
  end
  -- not installed at all
  return false
end
-- Check whether the package 'pkg' conflicts with 'installed_pkg' and return
-- false (no conflict) or an error message describing the conflict.
-- Three situations are checked:
--   1. 'pkg' provides a package with the same name as 'installed_pkg';
--   2. 'pkg' lists 'installed_pkg' in its 'conflicts' field;
--   3. 'installed_pkg' lists 'pkg' (or one of pkg's provides) in its 'conflicts' field.
local function packages_conflicts(pkg, installed_pkg)
  assert(type(pkg) == "table", "depends.packages_conflicts: Argument 'pkg' is not a table.")
  assert(type(installed_pkg) == "table", "depends.packages_conflicts: Argument 'installed_pkg' is not a table.")
  -- check if pkg doesn't provide an already installed_pkg
  if pkg.provides then
    -- for all of pkg's provides
    for _, provided_pkg in pairs(get_provides(pkg)) do
      if provided_pkg.name == installed_pkg.name then
        return "Package '" .. pkg_full_name(pkg.name, pkg.version, pkg.was_scm_version) .. "' provides '" .. pkg_full_name(provided_pkg.name, provided_pkg.version) .. "' but package '" .. pkg_full_name(installed_pkg.name, installed_pkg.version) .. "' is already " .. selected_or_installed(installed_pkg) .. "."
      end
    end
  end
  -- check for conflicts of package to install with installed package
  if pkg.conflicts then
    for _, conflict in pairs (pkg.conflicts) do
      if conflict == installed_pkg.name then
        return "Package '" .. pkg_full_name(pkg.name, pkg.version, pkg.was_scm_version) .. "' conflicts with already " .. selected_or_installed(installed_pkg) .. " package '" .. pkg_full_name(installed_pkg.name, installed_pkg.version) .. "'."
      end
    end
  end
  -- check for conflicts of installed package with package to install
  if installed_pkg.conflicts then
    -- direct conflicts with 'pkg'
    for _, conflict in pairs (installed_pkg.conflicts) do
      if conflict == pkg.name then
        return "Already " .. selected_or_installed(installed_pkg) .. " package '" .. pkg_full_name(installed_pkg.name, installed_pkg.version) .. "' conflicts with package '" .. pkg_full_name(pkg.name, pkg.version, pkg.was_scm_version) .. "'."
      end
    end
    -- conflicts with 'provides' of 'pkg' (packages provided by package to install)
    if pkg.provides then
      for _, conflict in pairs (installed_pkg.conflicts) do
        -- for all of pkg's provides
        for _, provided_pkg in pairs(get_provides(pkg)) do
          if conflict == provided_pkg.name then
            -- fixed: message previously read "Already '" with a stray quote,
            -- inconsistent with the direct-conflict message above
            return "Already " .. selected_or_installed(installed_pkg) .. " package '" .. pkg_full_name(installed_pkg.name, installed_pkg.version) .. "' conflicts with package '" .. pkg_full_name(provided_pkg.name, provided_pkg.version) .. "' provided by '" .. pkg_full_name(pkg.name, pkg.version, pkg.was_scm_version) .. "'."
          end
        end
      end
    end
  end
  -- no conflicts found
  return false
end
-- Return table of package dependencies 'depends' with OS specific dependencies extracted.
--
-- OS specific dependencies are stored in a subtable with 'arch' as a key.
-- E.g. this table containing OS specific dependencies:
-- depends = {
-- "lua~>5.1",
-- "luadist-git>=0.1",
-- Linux = {
-- "iup>=3.6",
-- "wxlua>=2.8.10.0",
-- },
-- Windows = {
-- "luagd>=2.0.33r2",
-- "luacom>=1.4.1",
-- },
-- }
--
-- ...will be on the 'Linux' architecture (determined by cfg.arch) converted into:
-- depends = {
-- "lua~>5.1",
-- "luadist-git>=0.1",
-- "iup>=3.6",
-- "wxlua>=2.8.10.0",
-- }
function extract_os_specific_depends(depends)
  assert(type(depends) == "table", "depends.extract_os_specific_depends: Argument 'depends' is not a table.")
  local extracted = {}
  for key, dep in pairs(depends) do
    if type(dep) ~= "table" then
      -- a plain dependency string applies to every architecture
      extracted[#extracted + 1] = dep
    elseif key == cfg.arch then
      -- a nested table holds dependencies only for the architecture
      -- named by its key; flatten it when it matches cfg.arch
      for _, os_specific_dep in pairs(dep) do
        extracted[#extracted + 1] = os_specific_dep
      end
    end
  end
  return extracted
end
-- Return all packages needed in order to install package 'pkg'
-- and with specified 'installed' packages in the system using 'manifest'.
-- 'pkg' can also contain version constraint (e.g. 'copas>=1.2.3', 'saci-1.0' etc.).
--
-- This function also downloads packages to get information about their dependencies.
-- Directory where the package was downloaded is stored in 'download_dir' attribute
-- of that package in the table of packages returned by this function.
--
-- Optional argument 'dependency_manifest' is a table of dependencies examined
-- from previous installations etc. It can be used to speed-up the dependency
-- resolving procedure for example.
--
-- When optional 'force_no_download' parameter is set to true, then information
-- about packages won't be downloaded during dependency resolving, assuming that
-- entries in the provided manifest are already complete.
--
-- When optional 'suppress_printing' parameter is set to true, then messages
-- for the user won't be printed during dependency resolving.
--
-- Optional argument 'deploy_dir' is used just as a temporary place to place
-- the downloaded packages into.
--
-- 'dependency_parents' is table of all packages encountered so far when resolving dependencies
-- and is used to detect and deal with circular dependencies. Leave it 'nil'
-- and it will do its job just fine :-).
--
-- 'tmp_installed' is internal table used in recursion and should be left 'nil' when
-- calling this function from other context. It is used for passing the changes
-- in installed packages between the recursive calls of this function.
--
-- TODO: refactor this spaghetti code!
local function get_packages_to_install(pkg, installed, manifest, dependency_manifest, force_no_download, suppress_printing, deploy_dir, dependency_parents, tmp_installed)
-- fill in defaults for the optional arguments
manifest = manifest or mf.get_manifest()
dependency_manifest = dependency_manifest or {}
force_no_download = force_no_download or false
suppress_printing = suppress_printing or false
deploy_dir = deploy_dir or cfg.root_dir
dependency_parents = dependency_parents or {}
-- set helper table 'tmp_installed'
tmp_installed = tmp_installed or utils.deepcopy(installed)
assert(type(pkg) == "string", "depends.get_packages_to_install: Argument 'pkg' is not a string.")
assert(type(installed) == "table", "depends.get_packages_to_install: Argument 'installed' is not a table.")
assert(type(manifest) == "table", "depends.get_packages_to_install: Argument 'manifest' is not a table.")
assert(type(dependency_manifest) == "table", "depends.get_packages_to_install: Argument 'dependency_manifest' is not a table.")
assert(type(force_no_download) == "boolean", "depends.get_packages_to_install: Argument 'force_no_download' is not a boolean.")
assert(type(suppress_printing) == "boolean", "depends.get_packages_to_install: Argument 'suppress_printing' is not a boolean.")
assert(type(deploy_dir) == "string", "depends.get_packages_to_install: Argument 'deploy_dir' is not a string.")
assert(type(dependency_parents) == "table", "depends.get_packages_to_install: Argument 'dependency_parents' is not a table.")
assert(type(tmp_installed) == "table", "depends.get_packages_to_install: Argument 'tmp_installed' is not a table.")
deploy_dir = sys.abs_path(deploy_dir)
--[[ for future debugging:
print('resolving: '.. pkg)
print('  installed: ', utils.table_tostring(installed))
print('  tmp_installed: ', utils.table_tostring(tmp_installed))
--]]
-- check if package is already installed
local pkg_name, pkg_constraint = split_name_constraint(pkg)
local pkg_is_installed, err = is_installed(pkg_name, tmp_installed, pkg_constraint)
if pkg_is_installed then return {} end
if err then return nil, err end
-- table of packages needed to be installed (will be returned)
local to_install = {}
-- find out available versions of 'pkg' and insert them into manifest
if not force_no_download then
local versions, err = package.retrieve_versions(pkg, manifest, suppress_printing)
if not versions then return nil, err end
for _, version in pairs(versions) do
table.insert(manifest, version)
end
end
-- find candidates & sort them
local candidates_to_install = find_packages(pkg, manifest)
if #candidates_to_install == 0 then
return nil, "No suitable candidate for '" .. pkg .. "' found."
end
candidates_to_install = sort_by_versions(candidates_to_install)
-- NOTE: inside this loop 'pkg' shadows the outer string argument and holds
-- the current candidate table; it is reassigned several times below.
for _, pkg in pairs(candidates_to_install) do
--[[ for future debugging:
print(' candidate: '.. pkg.name..'-'..pkg.version)
print('   installed: ', utils.table_tostring(installed))
print('   tmp_installed: ', utils.table_tostring(tmp_installed))
print('   to_install: ', utils.table_tostring(to_install))
print('   -is installed: ', is_installed(pkg.name, tmp_installed, pkg_constraint))
--]]
-- if there's an error from the previous candidate, print the reason for trying another one
if not suppress_printing and err then print("  - trying another candidate due to: " .. err) end
-- clear the state from the previous candidate
pkg_is_installed, err = false, nil
-- check whether this package has already been added to 'tmp_installed' by another of its candidates
pkg_is_installed, err = is_installed(pkg.name, tmp_installed, pkg_constraint)
if pkg_is_installed then break end
-- preserve information about the 'scm' version, because pkg.version
-- will be rewritten by information taken from pkg's dist.info file
local was_scm_version = (pkg.version == "scm")
-- Try to obtain cached dependency information from the dependency manifest
if dependency_manifest[pkg.name .. "-" .. pkg.version] and cfg.dep_cache then
pkg = dependency_manifest[pkg.name .. "-" .. pkg.version]
else
-- download info about the package if not already downloaded and downloading not prohibited
if not (pkg.download_dir or force_no_download) then
local path_or_err
pkg, path_or_err = package.retrieve_pkg_info(pkg, deploy_dir, suppress_printing)
if not pkg then
err = "Error when resolving dependencies: " .. path_or_err
else
-- set path to downloaded package - used to indicate that the
-- package was already downloaded, to delete unused but downloaded
-- packages and also to install chosen packages
pkg.download_dir = path_or_err
end
end
end
-- re-attach the scm flag lost when pkg was replaced above
if pkg and was_scm_version then pkg.was_scm_version = true end
-- check arch & type
if not err then
if not (pkg.arch == "Universal" or pkg.arch == cfg.arch) or
not (pkg.type == "all" or pkg.type == "source" or pkg.type == cfg.type) then
err = "Package '" .. pkg_full_name(pkg.name, pkg.version) .. "' doesn't have required arch and type."
end
end
-- checks for conflicts with other installed (or previously selected) packages
if not err then
for _, installed_pkg in pairs(tmp_installed) do
err = packages_conflicts(pkg, installed_pkg)
if err then break end
end
end
-- if pkg passed all of the above tests
if not err then
-- check if pkg's dependencies are satisfied
if pkg.depends then
-- insert pkg into the stack of circular dependencies detection
table.insert(dependency_parents, pkg.name)
-- extract all OS specific dependencies of pkg
pkg.depends = extract_os_specific_depends(pkg.depends)
-- for all dependencies of pkg
for _, depend in pairs(pkg.depends) do
local dep_name = split_name_constraint(depend)
-- detect circular dependencies using 'dependency_parents'
local is_circular_dependency = false
for _, parent in pairs(dependency_parents) do
if dep_name == parent then
is_circular_dependency = true
break
end
end
-- if circular dependencies not detected
if not is_circular_dependency then
-- recursively call this function on the candidates of this pkg's dependency
local depends_to_install, dep_err = get_packages_to_install(depend, installed, manifest, dependency_manifest, force_no_download, suppress_printing, deploy_dir, dependency_parents, tmp_installed)
-- if any suitable dependency packages were found, insert them to the 'to_install' table
if depends_to_install then
for _, depend_to_install in pairs(depends_to_install) do
-- add some meta information
if not depend_to_install.selected_by then
depend_to_install.selected_by = pkg.name .. "-" .. pkg.version
end
table.insert(to_install, depend_to_install)
table.insert(tmp_installed, depend_to_install)
table.insert(installed, depend_to_install)
end
else
err = "Error getting dependency of '" .. pkg_full_name(pkg.name, pkg.version) .. "': " .. dep_err
break
end
-- if circular dependencies detected
else
err = "Error getting dependency of '" .. pkg_full_name(pkg.name, pkg.version) .. "': '" .. dep_name .. "' is a circular dependency."
break
end
end
-- remove last package from the stack of circular dependencies detection
table.remove(dependency_parents)
end
-- if no error occurred
if not err then
-- add pkg and it's provides to the fake table of installed packages, with
-- property 'selected' set, indicating that the package isn't
-- really installed in the system, just selected to be installed (this is used e.g. in error messages)
pkg.selected = true
table.insert(tmp_installed, pkg)
if pkg.provides then
for _, provided_pkg in pairs(get_provides(pkg)) do
provided_pkg.selected = true
table.insert(tmp_installed, provided_pkg)
end
end
-- add pkg to the table of packages to install
table.insert(to_install, pkg)
-- if some error occurred
else
-- delete the downloaded package
if pkg.download_dir and not cfg.debug then sys.delete(pkg.download_dir) end
-- set tables of 'packages to install' and 'installed packages' to their original state
to_install = {}
tmp_installed = utils.deepcopy(installed)
-- add provided packages to installed ones
for _, installed_pkg in pairs(tmp_installed) do
for _, pkg in pairs(get_provides(installed_pkg)) do
table.insert(tmp_installed, pkg)
end
end
end
-- if error occurred
else
-- delete the downloaded package
if pkg and pkg.download_dir and not cfg.debug then sys.delete(pkg.download_dir) end
-- if pkg is already installed, skip checking its other candidates
if pkg_is_installed then break end
end
end
-- if package is not installed and no suitable candidates were found, return the last error
if #to_install == 0 and not pkg_is_installed then
return nil, err
else
return to_install
end
end
-- Resolve dependencies and return all packages needed in order to install
-- 'packages' into the system with already 'installed' packages, using 'manifest'.
-- Also return the table of the dependencies determined during the process
-- as the second return value.
--
-- Optional argument 'dependency_manifest' is a table of dependencies examined
-- from previous installations etc. It can be used to speed-up the dependency
-- resolving procedure for example.
--
-- Optional argument 'deploy_dir' is used as a temporary place to place the
-- downloaded packages into.
--
-- When optional 'force_no_download' parameter is set to true, then information
-- about packages won't be downloaded during dependency resolving, assuming that
-- entries in manifest are complete.
--
-- When optional 'suppress_printing' parameter is set to true, then messages
-- for the user won't be printed during dependency resolving.
function get_depends(packages, installed, manifest, dependency_manifest, deploy_dir, force_no_download, suppress_printing)
if not packages then return {} end
-- fill in defaults for the optional arguments
manifest = manifest or mf.get_manifest()
dependency_manifest = dependency_manifest or {}
deploy_dir = deploy_dir or cfg.root_dir
force_no_download = force_no_download or false
suppress_printing = suppress_printing or false
if type(packages) == "string" then packages = {packages} end
assert(type(packages) == "table", "depends.get_depends: Argument 'packages' is not a table or string.")
assert(type(installed) == "table", "depends.get_depends: Argument 'installed' is not a table.")
assert(type(manifest) == "table", "depends.get_depends: Argument 'manifest' is not a table.")
assert(type(dependency_manifest) == "table", "depends.get_depends: Argument 'dependency_manifest' is not a table.")
assert(type(deploy_dir) == "string", "depends.get_depends: Argument 'deploy_dir' is not a string.")
assert(type(force_no_download) == "boolean", "depends.get_depends: Argument 'force_no_download' is not a boolean.")
assert(type(suppress_printing) == "boolean", "depends.get_depends: Argument 'suppress_printing' is not a boolean.")
deploy_dir = sys.abs_path(deploy_dir)
-- work on a copy so the caller's 'installed' table is not mutated here
local tmp_installed = utils.deepcopy(installed)
-- add provided packages to installed ones
for _, installed_pkg in pairs(tmp_installed) do
for _, pkg in pairs(get_provides(installed_pkg)) do
table.insert(tmp_installed, pkg)
end
end
-- If 'pkg' contains valid (architecture specific) path separator,
-- it is treated like a path to already downloaded package and
-- we assume that user wants to use this specific version of the
-- module to be installed. Hence, we will add information about
-- this version into the manifest and also remove references to
-- any other versions of this module from the manifest. This will
-- enforce the version of the module required by the user.
for k, pkg in pairs(packages) do
if pkg:find(sys.path_separator()) then
local pkg_dir = sys.abs_path(pkg)
local pkg_info, err = mf.load_distinfo(sys.make_path(pkg_dir, "dist.info"))
if not pkg_info then return nil, err end
-- add information about location of the package, also to prevent downloading it again
pkg_info.download_dir = pkg_dir
-- mark package to skip deleting its directory after installation
pkg_info.preserve_pkg_dir = true
-- set default arch/type if not explicitly stated and package is of source type
if package.is_source_type(pkg_dir) then
pkg_info = package.ensure_source_arch_and_type(pkg_info)
elseif not (pkg_info.arch and pkg_info.type) then
return nil, pkg_dir .. ": binary package missing arch or type in 'dist.info'."
end
-- update manifest: drop every other version of this package, then add this one
manifest = utils.filter(manifest, function(p) return p.name ~= pkg_info.name and true end)
table.insert(manifest, pkg_info)
-- update packages to install (replace the path with a 'name-version' spec)
pkg = pkg_info.name .. "-" .. pkg_info.version
packages[k] = pkg
end
end
local to_install = {}
-- get packages needed to satisfy the dependencies
for _, pkg in pairs(packages) do
local needed_to_install, err = get_packages_to_install(pkg, tmp_installed, manifest, dependency_manifest, force_no_download, suppress_printing, deploy_dir)
-- if everything's fine
if needed_to_install then
for _, needed_pkg in pairs(needed_to_install) do
-- TODO: why not to use 'installed' instead of 'tmp_installed'?
--       It's because provides aren't searched for by find()
--       function inside the update_dependency_manifest().
dependency_manifest = update_dependency_manifest(needed_pkg, tmp_installed, needed_to_install, dependency_manifest)
table.insert(to_install, needed_pkg)
table.insert(tmp_installed, needed_pkg)
-- add provides of needed_pkg to installed ones
for _, provided_pkg in pairs(get_provides(needed_pkg)) do
-- copy 'selected' property
provided_pkg.selected = needed_pkg.selected
table.insert(tmp_installed, provided_pkg)
end
end
-- if error occurred
else
-- delete already downloaded packages
for _, pkg in pairs(to_install) do
if pkg.download_dir and not cfg.debug then sys.delete(pkg.download_dir) end
end
return nil, "Cannot resolve dependencies for '" .. pkg .. "': ".. err
end
end
return to_install, dependency_manifest
end
-- Return table of packages provided by specified package (from its 'provides' field)
function get_provides(package)
  assert(type(package) == "table", "depends.get_provides: Argument 'package' is not a table.")
  if not package.provides then return {} end
  local provided = {}
  -- build one pseudo-package per 'provides' entry, inheriting type/arch
  -- from the providing package and recording who provides it
  for _, provided_name in pairs(package.provides) do
    local name, version = split_name_constraint(provided_name)
    provided[#provided + 1] = {
      name = name,
      version = version,
      type = package.type,
      arch = package.arch,
      provided = package.name .. "-" .. package.version,
    }
  end
  return provided
end
-- Return package name and version constraint from full package version constraint specification
-- E. g.:
--   for 'luaexpat-1.2.3' return: 'luaexpat' , '1.2.3'
--   for 'luajit >= 1.2'  return: 'luajit'   , '>=1.2'
-- When no constraint part is present, the whole string is returned as the name
-- and the constraint is nil.
function split_name_constraint(version_constraint)
  assert(type(version_constraint) == "string", "depends.split_name_constraint: Argument 'version_constraint' is not a string.")
  -- position where the constraint part (operator/dash followed by a digit or 'scm') begins
  local split = version_constraint:find("[%s=~<>-]+%d") or version_constraint:find("[%s=~<>-]+scm")
  if split then
    -- assign through a local so only the replaced string is returned;
    -- previously gsub's substitution count leaked as a third return value
    local constraint = version_constraint:sub(split):gsub("[%s-]", "")
    return version_constraint:sub(1, split - 1), constraint
  else
    return version_constraint, nil
  end
end
-- Return only packages that can be installed on the specified architecture and type.
-- A package passes when its arch is "Universal" or matches 'req_arch', and its
-- type is "all", "source", or matches 'req_type'.
function filter_packages_by_arch_and_type(packages, req_arch, req_type)
  assert(type(packages) == "table", "depends.filter_packages_by_arch_and_type: Argument 'packages' is not a table.")
  assert(type(req_arch) == "string", "depends.filter_packages_by_arch_and_type: Argument 'req_arch' is not a string.")
  -- fixed: message previously referred to a nonexistent argument 'pkg_type'
  assert(type(req_type) == "string", "depends.filter_packages_by_arch_and_type: Argument 'req_type' is not a string.")
  return utils.filter(packages,
    function (pkg)
      return (pkg.arch == "Universal" or pkg.arch == req_arch) and
             (pkg.type == "all" or pkg.type == "source" or pkg.type == req_type)
    end)
end
-- Return only packages that contain one of the specified strings in their 'name-version'.
-- Case is ignored. If no strings are specified, return all the packages.
-- Argument 'search_in_desc' specifies if search also in description of packages.
function filter_packages_by_strings(packages, strings, search_in_desc)
  if type(strings) == "string" then strings = {strings} end
  assert(type(packages) == "table", "depends.filter_packages_by_strings: Argument 'packages' is not a table.")
  assert(type(strings) == "table", "depends.filter_packages_by_strings: Argument 'strings' is not a string or table.")
  -- no search strings given: keep everything
  if #strings == 0 then return packages end
  return utils.filter(packages,
    function (pkg)
      -- build the searched text once per package (plain, case-insensitive search)
      local haystack = pkg.name .. "-" .. pkg.version
      if search_in_desc then
        haystack = haystack .. " " .. (pkg.desc or "")
      end
      haystack = string.lower(haystack)
      for _, str in pairs(strings) do
        if string.find(haystack, string.lower(str), 1, true) ~= nil then
          return true
        end
      end
    end)
end
-- Return full package name and version string (e.g. 'luajit-2.0'). When version
-- is nil or '' then return only name (e.g. 'luajit') and when name is nil or ''
-- then return '<unknown>'. Optional 'was_scm_version' argument is a boolean,
-- stating whether the package was originally selected for installation as a 'scm' version.
function pkg_full_name(name, version, was_scm_version)
  name = name or ""
  version = version or ""
  was_scm_version = was_scm_version or false
  if type(version) == "number" then version = tostring(version) end
  assert(type(name) == "string", "depends.pkg_full_name: Argument 'name' is not a string.")
  assert(type(version) == "string", "depends.pkg_full_name: Argument 'version' is not a string.")
  if was_scm_version then
    version = version .. " [scm version]"
  end
  if name == "" then
    return "<unknown>"
  end
  if version == "" then
    return name
  end
  return name .. "-" .. version
end
-- Return table of packages, sorted descendingly by versions (newer ones are moved to the top).
function sort_by_versions(packages)
  assert(type(packages) == "table", "depends.sort_by_versions: Argument 'packages' is not a table.")
  local newer_first = function (a, b)
    return compare_versions(a.version, b.version)
  end
  return utils.sort(packages, newer_first)
end
-- Return table of packages, sorted alphabetically by name and then descendingly by version.
function sort_by_names(packages)
  assert(type(packages) == "table", "depends.sort_by_names: Argument 'packages' is not a table.")
  return utils.sort(packages, function (a, b)
    -- primary key: name (ascending); tie-break: version (newer first)
    if a.name ~= b.name then
      return a.name < b.name
    end
    return compare_versions(a.version, b.version)
  end)
end
-- Return if version satisfies the specified constraint.
-- Thin wrapper delegating to dist.constraints' constraint_satisfied().
-- @param version string: version to check (e.g. "1.2.3").
-- @param constraint string: constraint specification (e.g. ">=1.2").
function satisfies_constraint(version, constraint)
assert(type(version) == "string", "depends.satisfies_constraint: Argument 'version' is not a string.")
assert(type(constraint) == "string", "depends.satisfies_constraint: Argument 'constraint' is not a string.")
return const.constraint_satisfied(version, constraint)
end
-- For package versions, return whether: 'version_a' > 'version_b'.
-- Thin wrapper delegating to dist.constraints' compareVersions().
-- @param version_a string: left-hand version.
-- @param version_b string: right-hand version.
function compare_versions(version_a, version_b)
assert(type(version_a) == "string", "depends.compare_versions: Argument 'version_a' is not a string.")
assert(type(version_b) == "string", "depends.compare_versions: Argument 'version_b' is not a string.")
return const.compareVersions(version_a, version_b)
end
-- Returns 'dep_manifest' updated with information about the 'pkg'.
-- 'installed' is table with installed packages
-- 'to_install' is table with packages that are selected for installation
-- Packages satisfying the dependencies will be searched for in these two tables.
-- The entry key is 'name-version' (with "scm" kept when the package was
-- originally selected as a scm version); existing entries are left untouched.
function update_dependency_manifest(pkg, installed, to_install, dep_manifest)
dep_manifest = dep_manifest or {}
assert(type(pkg) == "table", "depends.update_dependency_manifest: Argument 'pkg' is not a table.")
assert(type(installed) == "table", "depends.update_dependency_manifest: Argument 'installed' is not a table.")
assert(type(to_install) == "table", "depends.update_dependency_manifest: Argument 'to_install' is not a table.")
assert(type(dep_manifest) == "table", "depends.update_dependency_manifest: Argument 'dep_manifest' is not a table.")
local name_ver = pkg.name .. "-" .. (pkg.was_scm_version and "scm" or pkg.version)
-- add to manifest
if not dep_manifest[name_ver] then
dep_manifest[name_ver] = {}
dep_manifest[name_ver].name = pkg.name
dep_manifest[name_ver].version = pkg.version
dep_manifest[name_ver].was_scm_version = pkg.was_scm_version
dep_manifest[name_ver].arch = pkg.arch
dep_manifest[name_ver].type = pkg.type
dep_manifest[name_ver].path = pkg.path
dep_manifest[name_ver].depends = pkg.depends
dep_manifest[name_ver].conflicts = pkg.conflicts
dep_manifest[name_ver].provides = pkg.provides
dep_manifest[name_ver].license = pkg.license
dep_manifest[name_ver].desc = pkg.desc
dep_manifest[name_ver].url = pkg.url
dep_manifest[name_ver].author = pkg.author
dep_manifest[name_ver].maintainer = pkg.maintainer
-- add information which dependency is satisfied by which package
if pkg.depends then
-- TODO: Won't it be better to add OS-specific 'satisfied_by' metadata in a format like OS-specific 'depends' ?
local all_deps = extract_os_specific_depends(pkg.depends)
dep_manifest[name_ver].satisfied_by = {}
for _, depend in pairs(all_deps) do
-- find package satisfying the dependency
-- NOTE(review): assumes every dependency is satisfied by some package in
-- 'installed' or 'to_install'; 'satisfying' would be nil (and the next
-- line would error) otherwise -- confirm callers guarantee this.
local satisfying = find_packages(depend, installed)[1] or find_packages(depend, to_install)[1]
satisfying = satisfying.name .. "-" .. satisfying.version
dep_manifest[name_ver].satisfied_by[depend] = satisfying
-- check whether the satisfying package isn't provided by other one
local provided_by = utils.filter(installed, function(pkg)
return pkg.provides and utils.contains(pkg.provides, satisfying)
end)
if #provided_by == 0 then
provided_by = utils.filter(to_install, function(pkg)
return pkg.provides and utils.contains(pkg.provides, satisfying)
end)
end
if #provided_by ~= 0 then
if not dep_manifest[name_ver].satisfying_provided_by then
dep_manifest[name_ver].satisfying_provided_by = {}
end
dep_manifest[name_ver].satisfying_provided_by[satisfying] = provided_by[1].name .. "-" .. provided_by[1].version
end
end
end
end
return dep_manifest
end

View File

@ -0,0 +1,306 @@
-- Encapsulated Git functionality
module ("dist.git", package.seeall)
require "git"
local sys = require "dist.sys"
local cfg = require "dist.config"
-- Clone the repository from url to dest_dir.
-- @param repository_url string: URL of the git repository to clone.
-- @param dest_dir string: directory to clone into (recreated if it exists).
-- @param depth number (optional): create a shallow clone with this history depth.
-- @param branch string (optional): branch to check out after cloning.
-- @return the results of sys.exec (success flag and error message).
function clone(repository_url, dest_dir, depth, branch)
  assert(type(repository_url) == "string", "git.clone: Argument 'repository_url' is not a string.")
  assert(type(dest_dir) == "string", "git.clone: Argument 'dest_dir' is not a string.")
  dest_dir = sys.abs_path(dest_dir)
  -- quote the URL like dest_dir below, so shell-special characters
  -- (spaces, '&', ';', ...) cannot break or inject into the command
  local command = "git clone " .. sys.quote(repository_url)
  if depth then
    assert(type(depth) == "number", "git.clone: Argument 'depth' is not a number.")
    command = command .. " --depth " .. depth
  end
  if branch then
    assert(type(branch) == "string", "git.clone: Argument 'branch' is not a string.")
    command = command .. " -b " .. branch
  end
  command = command .. " " .. sys.quote(dest_dir)
  -- start from a clean destination directory
  if sys.exists(dest_dir) then sys.delete(dest_dir) end
  sys.make_dir(dest_dir)
  -- change the current working directory to dest_dir
  local prev_current_dir = sys.current_dir()
  sys.change_dir(dest_dir)
  -- execute git clone (quiet unless debugging)
  if not cfg.debug then command = command .. " -q " end
  local ok, err = sys.exec(command)
  -- change the current working directory back
  sys.change_dir(prev_current_dir)
  return ok, err
end
-- Return table of all refs of the remote repository at the 'git_url'. Ref_type can be "tags" or "heads".
local function get_remote_refs(git_url, ref_type)
  assert(type(git_url) == "string", "git.get_remote_refs: Argument 'git_url' is not a string.")
  assert(type(ref_type) == "string", "git.get_remote_refs: Argument 'ref_type' is not a string.")
  assert(ref_type == "tags" or ref_type == "heads", "git.get_remote_refs: Argument 'ref_type' is not \"tags\" or \"heads\".")
  local ok, remotes = pcall(git.protocol.remotes, git_url)
  if not ok then
    return nil, "Error getting refs of the remote repository '" .. git_url .. "': " .. remotes
  end
  -- keep only refs of the requested type, skipping peeled '^{}' entries,
  -- and strip everything up to the final path component
  local match_pattern = "%S+/" .. ref_type .. "/%S+"
  local capture_pattern = "%S+/" .. ref_type .. "/(%S+)"
  local refs = {}
  for ref in pairs(remotes) do
    if ref:match(match_pattern) and not ref:match("%^{}") then
      refs[#refs + 1] = ref:match(capture_pattern)
    end
  end
  return refs
end
-- Return table of all tags of the repository at the 'git_url'.
-- Convenience wrapper around get_remote_refs() with ref_type "tags".
function get_remote_tags(git_url)
return get_remote_refs(git_url, "tags")
end
-- Return table of all branches of the repository at the 'git_url'.
-- Convenience wrapper around get_remote_refs() with ref_type "heads".
function get_remote_branches(git_url)
return get_remote_refs(git_url, "heads")
end
-- Checkout specified ref in specified git_repo_dir
-- Checkout the given 'ref' in 'git_repo_dir' (default: current directory).
-- If 'orphaned' is true, create an orphan branch of that name instead.
-- Returns the result of sys.exec (ok, err).
function checkout_ref(ref, git_repo_dir, orphaned)
    git_repo_dir = git_repo_dir or sys.current_dir()
    orphaned = orphaned or false
    assert(type(ref) == "string", "git.checkout_ref: Argument 'ref' is not a string.")
    assert(type(git_repo_dir) == "string", "git.checkout_ref: Argument 'git_repo_dir' is not a string.")
    assert(type(orphaned) == "boolean", "git.checkout_ref: Argument 'orphaned' is not a boolean.")
    git_repo_dir = sys.abs_path(git_repo_dir)

    local command = "git checkout "
    if orphaned then command = command .. " --orphan " end
    -- Quote the ref so names containing shell metacharacters cannot break
    -- or inject into the command line (the original interpolated it raw;
    -- paths elsewhere in this file are already quoted with sys.quote).
    command = command .. " " .. sys.quote(ref) .. " -f"
    if not cfg.debug then command = command .. " -q " end

    local ok, err
    if git_repo_dir ~= sys.current_dir() then
        -- Run the command from inside the repository, then restore the cwd.
        local prev_current_dir = sys.current_dir()
        sys.change_dir(git_repo_dir)
        ok, err = sys.exec(command)
        sys.change_dir(prev_current_dir)
    else
        ok, err = sys.exec(command)
    end
    return ok, err
end
-- Checkout specified sha in specified git_repo_dir
-- Checkout the given 'sha' in 'git_repo_dir' (default: current directory).
-- Returns true, or nil and an error message.
function checkout_sha(sha, git_repo_dir)
    git_repo_dir = git_repo_dir or sys.current_dir()
    assert(type(sha) == "string", "git.checkout_sha: Argument 'sha' is not a string.")
    assert(type(git_repo_dir) == "string", "git.checkout_sha: Argument 'git_repo_dir' is not a string.")
    git_repo_dir = sys.abs_path(git_repo_dir)

    local dir_changed, prev_current_dir
    if git_repo_dir ~= sys.current_dir() then
        prev_current_dir = sys.current_dir()
        sys.change_dir(git_repo_dir)
        dir_changed = true
    end

    -- Restore the original working directory before any return.
    -- (The original returned early on errors without changing back,
    -- leaving the process in 'git_repo_dir'.)
    local function restore_dir()
        if dir_changed then sys.change_dir(prev_current_dir) end
    end

    local ok, repo_or_err = pcall(git.repo.open, git_repo_dir)
    if not ok then
        restore_dir()
        return nil, "Error when opening the git repository '" .. git_repo_dir .. "': " .. repo_or_err
    end

    local err
    ok, err = pcall(repo_or_err.checkout, repo_or_err, sha, git_repo_dir)
    if not ok then
        -- Close the repo handle even on failure (the original leaked it).
        repo_or_err:close()
        restore_dir()
        return nil, "Error when checking out the sha '" .. sha .. "' in the git repository '" .. git_repo_dir .. "': " .. err
    end

    repo_or_err:close()
    restore_dir()
    return true
end
-- Create an empty git repository in given directory.
-- Create an empty git repository in 'dir' (default: current directory).
-- Returns the result of sys.exec (ok, err).
function init(dir)
    dir = dir or sys.current_dir()
    assert(type(dir) == "string", "git.init: Argument 'dir' is not a string.")
    dir = sys.abs_path(dir)
    -- 'git init' fails on Windows when the parent directory of 'dir' is
    -- missing, so create the whole directory tree up front.
    local ok, err = sys.make_dir(dir)
    if not ok then return nil, err end
    local cmd = "git init " .. sys.quote(dir)
    if not cfg.debug then cmd = cmd .. " -q " end
    return sys.exec(cmd)
end
-- Add all files in the 'repo_dir' to the git index. The 'repo_dir' must be
-- in the initialized git repository.
-- Add all files in 'repo_dir' to the git index. 'repo_dir' must be inside
-- an initialized git repository. Returns ok, msg from sys.exec.
function add_all(repo_dir)
    repo_dir = repo_dir or sys.current_dir()
    assert(type(repo_dir) == "string", "git.add_all: Argument 'repo_dir' is not a string.")
    repo_dir = sys.abs_path(repo_dir)
    local ok, prev_dir, msg
    ok, prev_dir = sys.change_dir(repo_dir);
    -- Bug fix: on failure sys.change_dir's error message is in 'prev_dir';
    -- the original returned the undefined global 'err' (i.e. nil) here.
    if not ok then return nil, prev_dir end
    ok, msg = sys.exec("git add -A -f " .. sys.quote(repo_dir))
    sys.change_dir(prev_dir)
    return ok, msg
end
-- Commit all indexed files in 'repo_dir' with the given commit 'message'.
-- The 'repo_dir' must be in the initialized git repository.
-- Commit all indexed files in 'repo_dir' with the given commit 'message'.
-- 'repo_dir' must be inside an initialized git repository.
-- Returns ok, msg from sys.exec.
function commit(message, repo_dir)
    repo_dir = repo_dir or sys.current_dir()
    message = message or "commit by luadist-git"
    assert(type(message) == "string", "git.commit: Argument 'message' is not a string.")
    assert(type(repo_dir) == "string", "git.commit: Argument 'repo_dir' is not a string.")
    repo_dir = sys.abs_path(repo_dir)
    local ok, prev_dir, msg
    ok, prev_dir = sys.change_dir(repo_dir);
    -- Bug fix: on failure sys.change_dir's error message is in 'prev_dir';
    -- the original returned the undefined global 'err' (i.e. nil) here.
    if not ok then return nil, prev_dir end
    local command = "git commit -m " .. sys.quote(message)
    if not cfg.debug then command = command .. " -q " end
    ok, msg = sys.exec(command)
    sys.change_dir(prev_dir)
    return ok, msg
end
-- Rename branch 'old_name' to 'new_name'. -- The 'repo_dir' must be
-- in the initialized git repository and the branch 'new_name' must
-- not already exist in that repository.
-- Rename branch 'old_name' to 'new_name' in 'repo_dir', which must be inside
-- an initialized git repository; 'new_name' must not already exist there.
-- Returns ok, msg from sys.exec.
function rename_branch(old_name, new_name, repo_dir)
    repo_dir = repo_dir or sys.current_dir()
    assert(type(old_name) == "string", "git.rename_branch: Argument 'old_name' is not a string.")
    assert(type(new_name) == "string", "git.rename_branch: Argument 'new_name' is not a string.")
    assert(type(repo_dir) == "string", "git.rename_branch: Argument 'repo_dir' is not a string.")
    repo_dir = sys.abs_path(repo_dir)
    local ok, prev_dir, msg
    ok, prev_dir = sys.change_dir(repo_dir);
    -- Bug fix: on failure sys.change_dir's error message is in 'prev_dir';
    -- the original returned the undefined global 'err' (i.e. nil) here.
    if not ok then return nil, prev_dir end
    ok, msg = sys.exec("git branch -m " .. old_name .. " " .. new_name)
    sys.change_dir(prev_dir)
    return ok, msg
end
-- Push the ref 'ref_name' from the 'repo_dir' to the remote git
-- repository 'git_repo_url'. If 'all_tags' is set to true, all tags
-- will be pushed, in addition to the explicitly given ref.
-- If 'delete' is set to 'true' then the explicitly given remote ref
-- will be deleted, not pushed.
-- Push 'ref_name' from 'repo_dir' to the remote 'git_repo_url'.
-- all_tags: also push all tags. delete: delete the remote ref instead.
-- Returns ok, msg from sys.exec.
function push_ref(repo_dir, ref_name, git_repo_url, all_tags, delete)
    repo_dir = repo_dir or sys.current_dir()
    all_tags = all_tags or false
    delete = delete or false
    assert(type(repo_dir) == "string", "git.push_ref: Argument 'repo_dir' is not a string.")
    assert(type(git_repo_url) == "string", "git.push_ref: Argument 'git_repo_url' is not a string.")
    assert(type(ref_name) == "string", "git.push_ref: Argument 'ref_name' is not a string.")
    assert(type(all_tags) == "boolean", "git.push_ref: Argument 'all_tags' is not a boolean.")
    assert(type(delete) == "boolean", "git.push_ref: Argument 'delete' is not a boolean.")
    repo_dir = sys.abs_path(repo_dir)
    local ok, prev_dir, msg
    ok, prev_dir = sys.change_dir(repo_dir);
    -- Bug fix: on failure sys.change_dir's error message is in 'prev_dir';
    -- the original returned the undefined global 'err' (i.e. nil) here.
    if not ok then return nil, prev_dir end
    local command = "git push " .. git_repo_url
    if all_tags then command = command .. " --tags " end
    if delete then command = command .. " --delete " end
    command = command .. " " .. ref_name .. " -f "
    if not cfg.debug then command = command .. " -q " end
    ok, msg = sys.exec(command)
    sys.change_dir(prev_dir)
    return ok, msg
end
-- Creates the tag 'tag_name' in given 'repo_dir', which must be
-- in the initialized git repository
-- Create (or force-move) the tag 'tag_name' in 'repo_dir', which must be
-- inside an initialized git repository. Returns ok, msg from sys.exec.
function create_tag(repo_dir, tag_name)
    repo_dir = repo_dir or sys.current_dir()
    assert(type(repo_dir) == "string", "git.create_tag: Argument 'repo_dir' is not a string.")
    assert(type(tag_name) == "string", "git.create_tag: Argument 'tag_name' is not a string.")
    repo_dir = sys.abs_path(repo_dir)
    local ok, prev_dir, msg
    ok, prev_dir = sys.change_dir(repo_dir);
    -- Bug fix: on failure sys.change_dir's error message is in 'prev_dir';
    -- the original returned the undefined global 'err' (i.e. nil) here.
    if not ok then return nil, prev_dir end
    ok, msg = sys.exec("git tag " .. tag_name .. " -f ")
    sys.change_dir(prev_dir)
    return ok, msg
end
-- Fetch given 'ref_name' from the remote 'git_repo_url' to the local repository
-- 'repo_dir' and return its sha. 'ref_type' can be "tag" or "head".
-- Fetch 'ref_name' from the remote 'git_repo_url' into the local repository
-- 'repo_dir' and return its sha. 'ref_type' can be "tag" or "head".
local function fetch_ref(repo_dir, git_repo_url, ref_name, ref_type)
    repo_dir = repo_dir or sys.current_dir()
    assert(type(repo_dir) == "string", "git.fetch_ref: Argument 'repo_dir' is not a string.")
    assert(type(git_repo_url) == "string", "git.fetch_ref: Argument 'git_repo_url' is not a string.")
    assert(type(ref_name) == "string", "git.fetch_ref: Argument 'ref_name' is not a string.")
    assert(type(ref_type) == "string", "git.fetch_ref: Argument 'ref_type' is not a string.")
    -- Fixed copy-pasted message that blamed 'git.get_remote_refs'.
    assert(ref_type == "tag" or ref_type == "head", "git.fetch_ref: Argument 'ref_type' is not \"tag\" or \"head\".")
    repo_dir = sys.abs_path(repo_dir)
    local refstring = "refs/" .. ref_type .. "s/" .. ref_name
    local suppress_fetch_progress = not cfg.debug

    local ok, repo_or_err = pcall(git.repo.open, repo_dir)
    if not ok then return nil, "Error when opening the git repository '" .. repo_dir .. "': " .. repo_or_err end

    -- Don't shadow 'ok' (the original redeclared it with a second 'local').
    local pack_or_err, sha
    ok, pack_or_err, sha = pcall(git.protocol.fetch, git_repo_url, repo_or_err, refstring, suppress_fetch_progress)
    if not ok then
        -- Close the repo handle on failure too (the original leaked it).
        repo_or_err:close()
        return nil, "Error when fetching ref '" .. refstring .. "' from git repository '" .. git_repo_url .. "': " .. pack_or_err
    end

    repo_or_err:close()
    pack_or_err:close()
    return sha
end
-- Fetch given 'tag_name' from the remote 'git_repo_url' to the local repository
-- 'repo_dir' and save it as a tag with the same 'tag_name'.
-- Convenience wrapper: fetch 'tag_name' from 'git_repo_url' into 'repo_dir'.
function fetch_tag(repo_dir, git_repo_url, tag_name) return fetch_ref(repo_dir, git_repo_url, tag_name, "tag") end
-- Fetch given 'branch_name' from the remote 'git_repo_url' to the local repository
-- 'repo_dir' and save it as a branch with the same 'branch_name'.
-- Convenience wrapper: fetch 'branch_name' from 'git_repo_url' into 'repo_dir'.
function fetch_branch(repo_dir, git_repo_url, branch_name) return fetch_ref(repo_dir, git_repo_url, branch_name, "head") end
-- Create the git repository and return the repo object (which can be used in checkout_sha etc.)
-- If the 'dir' exists, it's deleted prior to creating the git repository.
-- Create a git repository in 'dir', replacing any existing content there.
-- Returns true, or nil and an error message.
function create_repo(dir)
    assert(type(dir) == "string", "git.create_repo: Argument 'dir' is not a string.")
    -- Start from a clean slate: remove any previous content at 'dir'.
    if sys.exists(dir) then sys.delete(dir) end
    local ok, res = pcall(git.repo.create, dir)
    if not ok then
        return nil, "Error when creating the git repository '" .. dir .. "': " .. res
    end
    res:close()
    return true
end

View File

@ -0,0 +1,349 @@
-- main API of LuaDist
module ("dist", package.seeall)
local cfg = require "dist.config"
local depends = require "dist.depends"
local git = require "dist.git"
local sys = require "dist.sys"
local package = require "dist.package"
local mf = require "dist.manifest"
local utils = require "dist.utils"
-- Return the deployment directory.
-- Return the absolute path of the deployment directory.
function get_deploy_dir() return sys.abs_path(cfg.root_dir) end
-- Return packages deployed in 'deploy_dir' also with their provides.
-- Return packages deployed in 'deploy_dir', including the virtual packages
-- they provide (each annotated with 'provided_by'), sorted by name.
function get_deployed(deploy_dir)
    deploy_dir = deploy_dir or cfg.root_dir
    assert(type(deploy_dir) == "string", "dist.get_deployed: Argument 'deploy_dir' is not a string.")
    deploy_dir = sys.abs_path(deploy_dir)

    local deployed = depends.get_installed(deploy_dir)

    -- Collect virtual packages provided by the installed ones, remembering
    -- which real package provides each of them.
    local provided = {}
    for _, pkg in pairs(deployed) do
        for _, virtual_pkg in pairs(depends.get_provides(pkg)) do
            virtual_pkg.provided_by = pkg.name .. "-" .. pkg.version
            provided[#provided + 1] = virtual_pkg
        end
    end

    for _, virtual_pkg in pairs(provided) do
        deployed[#deployed + 1] = virtual_pkg
    end

    return depends.sort_by_names(deployed)
end
-- Download new 'manifest_file' from repository and returns it.
-- Return nil and error message on error.
-- Download a fresh manifest from the repositories and return it,
-- or nil and an error message on failure.
function update_manifest(deploy_dir)
    deploy_dir = deploy_dir or cfg.root_dir
    assert(type(deploy_dir) == "string", "dist.update_manifest: Argument 'deploy_dir' is not a string.")
    deploy_dir = sys.abs_path(deploy_dir)
    -- TODO: use 'deploy_dir' argument in manifest functions
    -- Force a fresh download by bypassing the manifest cache.
    local manifest, err = mf.get_manifest(nil, true)
    if not manifest then return nil, err end
    return manifest
end
-- Install 'package_names' to 'deploy_dir', using optional CMake 'variables'.
-- Install 'package_names' (string or table) into 'deploy_dir', using optional
-- CMake 'variables'. Returns true, or nil and an error message.
function install(package_names, deploy_dir, variables)
-- Nothing requested means nothing to do.
if not package_names then return true end
deploy_dir = deploy_dir or cfg.root_dir
if type(package_names) == "string" then package_names = {package_names} end
assert(type(package_names) == "table", "dist.install: Argument 'package_names' is not a table or string.")
assert(type(deploy_dir) == "string", "dist.install: Argument 'deploy_dir' is not a string.")
deploy_dir = sys.abs_path(deploy_dir)
-- find installed packages
local installed = depends.get_installed(deploy_dir)
-- get manifest
local manifest, err = mf.get_manifest()
if not manifest then return nil, "Error getting manifest: " .. err end
-- get dependency manifest
-- TODO: Is it good that dep_manifest is deploy_dir-specific?
-- Probably it'd be better not to be specific, but then there're
-- problems with 'provides'. E.g. What to do if there's a module
-- installed, that is provided by two different modules in two deploy_dirs?
local dep_manifest_file = sys.abs_path(sys.make_path(deploy_dir, cfg.dep_cache_file))
local dep_manifest, status = {}
-- Use the cached dependency manifest only when caching is enabled and the
-- cache file is still fresh. (Consistency fix: the 'cfg.cache' check was
-- missing here, unlike dependency_info() and manifest.get_manifest().)
if sys.exists(dep_manifest_file) and cfg.cache and not utils.cache_timeout_expired(cfg.cache_timeout, dep_manifest_file) then
status, dep_manifest = mf.load_manifest(dep_manifest_file)
if not dep_manifest then return nil, status end
end
-- resolve dependencies
local dependencies, dep_manifest_or_err = depends.get_depends(package_names, installed, manifest, dep_manifest, deploy_dir, false, false)
if not dependencies then return nil, dep_manifest_or_err end
if #dependencies == 0 then return nil, "No packages to install." end
-- save updated dependency manifest
local ok, err = sys.make_dir(sys.parent_dir(dep_manifest_file))
if not ok then return nil, err end
ok, err = mf.save_manifest(dep_manifest_or_err, dep_manifest_file)
if not ok then return nil, err end
-- fetch the packages from repository
local fetched_pkgs = {}
for _, pkg in pairs(dependencies) do
local fetched_pkg, err = package.fetch_pkg(pkg, sys.make_path(deploy_dir, cfg.temp_dir))
if not fetched_pkg then return nil, err end
table.insert(fetched_pkgs, fetched_pkg)
end
-- install fetched packages
for _, pkg in pairs(fetched_pkgs) do
local ok, err = package.install_pkg(pkg.download_dir, deploy_dir, variables, pkg.preserve_pkg_dir)
if not ok then return nil, err end
end
return true
end
-- Manually deploy packages from 'package_dirs' to 'deploy_dir', using optional
-- CMake 'variables'. The 'package_dirs' are preserved (will not be deleted).
-- Manually deploy packages from 'package_dirs' into 'deploy_dir', using
-- optional CMake 'variables'. Source directories are preserved.
-- Returns true, or nil and an error message.
function make(deploy_dir, package_dirs, variables)
    deploy_dir = deploy_dir or cfg.root_dir
    package_dirs = package_dirs or {}
    assert(type(deploy_dir) == "string", "dist.make: Argument 'deploy_dir' is not a string.")
    assert(type(package_dirs) == "table", "dist.make: Argument 'package_dirs' is not a table.")
    deploy_dir = sys.abs_path(deploy_dir)

    -- Deploy every given directory; 'true' preserves the package sources.
    for _, pkg_dir in pairs(package_dirs) do
        local ok, err = package.install_pkg(sys.abs_path(pkg_dir), deploy_dir, variables, true)
        if not ok then return nil, err end
    end
    return true
end
-- Remove 'package_names' from 'deploy_dir' and return the number of removed
-- packages.
-- Remove 'package_names' (string or table) from 'deploy_dir'.
-- Returns the number of removed packages, or nil and an error message.
function remove(package_names, deploy_dir)
    deploy_dir = deploy_dir or cfg.root_dir
    if type(package_names) == "string" then package_names = {package_names} end
    assert(type(package_names) == "table", "dist.remove: Argument 'package_names' is not a string or table.")
    assert(type(deploy_dir) == "string", "dist.remove: Argument 'deploy_dir' is not a string.")
    deploy_dir = sys.abs_path(deploy_dir)

    local installed = depends.get_installed(deploy_dir)

    -- An empty name list means "remove everything that is installed".
    local pkgs_to_remove
    if #package_names == 0 then
        pkgs_to_remove = installed
    else
        pkgs_to_remove = depends.find_packages(package_names, installed)
    end

    for _, pkg in pairs(pkgs_to_remove) do
        local pkg_distinfo_dir = sys.make_path(cfg.distinfos_dir, pkg.name .. "-" .. pkg.version)
        local ok, err = package.remove_pkg(pkg_distinfo_dir, deploy_dir)
        if not ok then return nil, err end
    end

    return #pkgs_to_remove
end
-- Download 'pkg_names' to 'fetch_dir' and return the table of their directories.
-- Download 'pkg_names' (string or table) into 'fetch_dir' and return the
-- table of their download directories, or nil and an error message.
function fetch(pkg_names, fetch_dir)
    fetch_dir = fetch_dir or sys.current_dir()
    -- Accept a single name, consistent with install()/remove() (the assert
    -- message below already promised "string or table").
    if type(pkg_names) == "string" then pkg_names = {pkg_names} end
    assert(type(pkg_names) == "table", "dist.fetch: Argument 'pkg_names' is not a string or table.")
    assert(type(fetch_dir) == "string", "dist.fetch: Argument 'fetch_dir' is not a string.")
    fetch_dir = sys.abs_path(fetch_dir)

    -- Bug fix: the original ignored get_manifest() errors, then crashed
    -- later when using the nil manifest.
    local manifest, err = mf.get_manifest()
    if not manifest then return nil, "Error getting manifest: " .. err end

    -- Pick the best (highest) available version of every requested package.
    local pkgs_to_fetch = {}
    for _, pkg_name in pairs(pkg_names) do
        -- retrieve available versions
        local versions, err = package.retrieve_versions(pkg_name, manifest)
        if not versions then return nil, err end
        for _, version in pairs(versions) do
            table.insert(manifest, version)
        end
        local packages = depends.find_packages(pkg_name, manifest)
        if #packages == 0 then return nil, "No packages found for '" .. pkg_name .. "'." end
        packages = depends.sort_by_versions(packages)
        table.insert(pkgs_to_fetch, packages[1])
    end

    local fetched_dirs = {}
    for _, pkg in pairs(pkgs_to_fetch) do
        local fetched_pkg, err = package.fetch_pkg(pkg, fetch_dir)
        if not fetched_pkg then return nil, err end
        table.insert(fetched_dirs, fetched_pkg.download_dir)
    end
    return fetched_dirs
end
-- Upload binary version of given modules installed in the specified
-- 'deploy_dir' to the repository specified by provided base url.
-- Return the number of uploaded packages.
--
-- Organization of uploaded modules and their repositories is subject
-- to the following conventions:
-- - destination repository is: 'DEST_GIT_BASE_URL/MODULE_NAME'
-- - module will be uploaded to the branch: 'ARCH-TYPE' according
-- to the arch and type of the user's machine
-- - the module will be tagged as: 'VERSION-ARCH-TYPE' (if the tag already
-- exists, it will be overwritten)
--
-- E.g. assume that the module 'lua-5.1.4' is installed on the 32bit Linux
-- system (Linux-i686). When this function is called with the module name
-- 'lua' and base url 'git@github.com:LuaDist', then the binary version
-- of the module 'lua', that is installed on the machine, will be uploaded
-- to the branch 'Linux-i686' of the repository 'git@github.com:LuaDist/lua.git'
-- and tagged as '5.1.4-Linux-i686'.
function upload_modules(deploy_dir, module_names, dest_git_base_url)
deploy_dir = deploy_dir or cfg.root_dir
if type(module_names) == "string" then module_names = {module_names} end
assert(type(deploy_dir) == "string", "dist.upload_module: Argument 'deploy_dir' is not a string.")
assert(type(module_names) == "table", "dist.upload_module: Argument 'module_name' is not a string or table.")
assert(type(dest_git_base_url) == "string", "dist.upload_module: Argument 'dest_git_base_url' is not a string.")
deploy_dir = sys.abs_path(deploy_dir)
local modules_to_upload = {}
local installed = depends.get_installed(deploy_dir)
-- find modules to upload
-- An empty 'module_names' list means "upload everything that is installed".
if #module_names == 0 then
modules_to_upload = installed
else
modules_to_upload = depends.find_packages(module_names, installed)
end
for _, installed_module in pairs(modules_to_upload) do
-- set names
-- branch: 'ARCH-TYPE', tag: 'VERSION-ARCH-TYPE' (see the header comment above).
local branch_name = cfg.arch .. "-" .. cfg.type
local tag_name = installed_module.version .. "-" .. branch_name
local full_name = installed_module.name .. "-" .. tag_name
local tmp_dir = sys.make_path(deploy_dir, cfg.temp_dir, full_name .. "-to-upload")
local dest_git_url = dest_git_base_url .. "/" .. installed_module.name .. ".git"
local distinfo_file = sys.make_path(deploy_dir, cfg.distinfos_dir, installed_module.name .. "-" .. installed_module.version, "dist.info")
-- create temporary directory (delete previous if already exists)
if sys.exists(tmp_dir) then sys.delete(tmp_dir) end
local ok, err = sys.make_dir(tmp_dir)
if not ok then return nil, err end
-- copy the module files for all enabled components
for _, component in ipairs(cfg.components) do
if installed_module.files[component] then
for _, file in ipairs(installed_module.files[component]) do
local file_path = sys.make_path(deploy_dir, file)
local dest_dir = sys.parent_dir(sys.make_path(tmp_dir, file))
-- Only regular files are copied; their parent directories are recreated.
if sys.is_file(file_path) then
sys.make_dir(dest_dir)
sys.copy(file_path, dest_dir)
end
end
end
end
-- add module's dist.info file
sys.copy(distinfo_file, tmp_dir)
-- create git repo
ok, err = git.init(tmp_dir)
if not ok then return nil, "Error initializing empty git repository in '" .. tmp_dir .. "': " .. err end
-- add all files
ok, err = git.add_all(tmp_dir)
if not ok then return nil, "Error adding all files to the git index in '" .. tmp_dir .. "': " .. err end
-- create commit
ok, err = git.commit("[luadist-git] add " .. full_name .. " [ci skip]", tmp_dir)
if not ok then return nil, "Error commiting changes in '" .. tmp_dir .. "': " .. err end
-- rename branch
ok, err = git.rename_branch("master", branch_name, tmp_dir)
if not ok then return nil, "Error renaming branch 'master' to '" .. branch_name .. "' in '" .. tmp_dir .. "': " .. err end
-- create tag
ok, err = git.create_tag(tmp_dir, tag_name)
if not ok then return nil, "Error creating tag '" .. tag_name .. "' in '" .. tmp_dir .. "': " .. err end
print("Uploading " .. full_name .. " to " .. dest_git_url .. "...")
-- push to the repository
-- 'all_tags = true' makes the freshly created tag ride along with the branch.
ok, err = git.push_ref(tmp_dir, branch_name, dest_git_url, true)
if not ok then return nil, "Error when pushing branch '" .. branch_name .. "' and tag '" .. tag_name .. "' to '" .. dest_git_url .. "': " .. err end
-- delete temporary directory (if not in debug mode)
-- NOTE(review): tmp_dir is NOT removed when an earlier step fails — TODO confirm intended.
if not cfg.debug then sys.delete(tmp_dir) end
end
-- Returns the number of modules selected for upload (not the number pushed).
return #modules_to_upload
end
-- Returns table with information about module's dependencies, using the cache.
-- Return a table with dependency information for 'module', resolved against
-- an empty installation (so the full dependency set is reported), using the
-- deploy_dir-specific dependency cache. Returns the table, or nil and an
-- error message.
function dependency_info(module, deploy_dir)
    -- (Removed the original's dead first line, which assigned the unused
    -- accidental *global* 'cache_file'.)
    assert(type(module) == "string", "dist.dependency_info: Argument 'module' is not a string.")
    assert(type(deploy_dir) == "string", "dist.dependency_info: Argument 'deploy_dir' is not a string.")
    -- get manifest
    local manifest, err = mf.get_manifest()
    if not manifest then return nil, "Error getting manifest: " .. err end
    -- get dependency manifest
    -- TODO: Is it good that dep_manifest is deploy_dir-specific?
    -- Probably it'd be better not to be specific, but then there're
    -- problems with 'provides'. E.g. What to do if there's a module
    -- installed, that is provided by two different modules in two deploy_dirs?
    local dep_manifest_file = sys.abs_path(sys.make_path(deploy_dir, cfg.dep_cache_file))
    local dep_manifest = {}
    if sys.exists(dep_manifest_file) and cfg.cache and not utils.cache_timeout_expired(cfg.cache_timeout, dep_manifest_file) then
        local status
        status, dep_manifest = mf.load_manifest(dep_manifest_file)
        if not dep_manifest then return nil, status end
    end
    -- Resolve against an empty 'installed' set to force full dependency info.
    local installed = {}
    -- ('true and not cfg.debug' in the original simplifies to 'not cfg.debug'.)
    local dependencies, dep_manifest_or_err = depends.get_depends(module, installed, manifest, dep_manifest, deploy_dir, false, not cfg.debug)
    if not dependencies then return nil, dep_manifest_or_err end
    -- save updated dependency manifest
    local ok, err = sys.make_dir(sys.parent_dir(dep_manifest_file))
    if not ok then return nil, err end
    ok, err = mf.save_manifest(dep_manifest_or_err, dep_manifest_file)
    if not ok then return nil, err end
    -- collect just relevant dependencies from dependency manifest
    local relevant_deps = {}
    for _, dep in pairs(dependencies) do
        local name_ver = dep.name .. "-" .. (dep.was_scm_version and "scm" or dep.version)
        local info = dep_manifest_or_err[name_ver]
        if not info then
            return nil, "Error: dependency information for '" .. name_ver .. "' not found in dependency manifest."
        end
        table.insert(relevant_deps, info)
    end
    return relevant_deps
end

View File

@ -0,0 +1,64 @@
-- Simple logger for LuaDist.
module ("dist.logger", package.seeall)
local cfg = require "dist.config"
local sys = require "dist.sys"
-- Open 'log_file' and return a log, or nil and error msg on error.
-- Open 'log_file' for appending and return the file handle,
-- or nil and an error message on failure.
local function get_log(log_file)
    log_file = log_file or cfg.log_file
    assert(type(log_file) == "string", "log.get_log: Argument 'log_file' is not a string.")
    log_file = sys.abs_path(log_file)
    -- Make sure the directory for the log exists before opening the file.
    sys.make_dir(sys.parent_dir(log_file))
    local log, err = io.open(log_file, "a")
    if log then return log end
    return nil, "Error: can't open a logfile '" .. log_file .. "': " .. err
end
-- Set the default log.
-- NOTE(review): get_log() returns nil on failure (e.g. unwritable path),
-- which would make the log_file:write call in write() crash — TODO confirm.
local log_file = get_log(cfg.log_file)
-- Log levels used.
-- Numeric values are ordered by severity; write() compares them with '>='
-- against the cfg.write_log_level / cfg.print_log_level thresholds.
local log_levels = {
DEBUG = 0, -- Fine-grained informational events that are most useful to debug an application.
INFO = 1, -- Informational messages that highlight the progress of the application at coarse-grained level.
WARN = 2, -- Potentially harmful situations.
ERROR = 3, -- Error events that might still allow the application to continue running.
FATAL = 4, -- Very severe error events that would presumably lead the application to abort.
}
-- Write 'message' with 'level' to 'log'.
-- Write the message composed of all '...' arguments (joined by spaces)
-- with the given 'level' to the log file and/or stdout, depending on the
-- cfg.write_log_level / cfg.print_log_level thresholds.
local function write(level, ...)
    assert(type(level) == "string", "log.write: Argument 'level' is not a string.")
    -- Bug fix: normalize BEFORE validating, so lowercase levels like "info"
    -- pass the check (the original asserted first, making its later
    -- level:upper() call unreachable for lowercase input).
    level = level:upper()
    assert(type(log_levels[level]) == "number", "log.write: Unknown log level used: '" .. level .. "'.")
    -- Bug fix: collect varargs with {...} instead of the implicit 'arg'
    -- table, which is a Lua 5.0 relic absent in stock Lua 5.1+.
    local parts = {...}
    assert(#parts > 0, "log.write: No message arguments provided.")
    local message = table.concat(parts, " ")
    -- Check if writing for this log level is enabled.
    if cfg.write_log_level and log_levels[level] >= log_levels[cfg.write_log_level] then
        log_file:write(os.date("%Y-%m-%d %H:%M:%S") .. " [" .. level .. "]\t" .. message .. "\n")
        log_file:flush()
    end
    -- Check if printing for this log level is enabled.
    if cfg.print_log_level and log_levels[level] >= log_levels[cfg.print_log_level] then
        print(message)
    end
end
-- Functions with defined log levels for simple use.
-- Each delegates to the local write(); they become module-public fields via
-- module(..., package.seeall). Note that 'error' shadows the global Lua
-- 'error' function inside this module's environment.
function debug(...) return write("DEBUG", ...) end
function info(...) return write("INFO", ...) end
function warn(...) return write("WARN", ...) end
function error(...) return write("ERROR", ...) end
function fatal(...) return write("FATAL", ...) end
-- Function with explicitly specified log level.
function log(level, ...) return write(level, ...) end

View File

@ -0,0 +1,248 @@
-- Working with manifest and dist.info files
module ("dist.manifest", package.seeall)
local cfg = require "dist.config"
local git = require "dist.git"
local sys = require "dist.sys"
local utils = require "dist.utils"
-- Return the manifest table from 'manifest_file'. If the manifest is in cache,
-- then the cached version is used. You can set the cache timeout value in
-- 'config.cache_timeout' variable.
-- If optional 'force_no_cache' parameter is true, then the cache is not used.
-- Return the manifest table from 'manifest_file', refreshing the cached copy
-- when it is missing, disabled, expired, or when 'force_no_cache' is true.
-- Returns the manifest, or nil and an error message.
function get_manifest(manifest_file, force_no_cache)
    manifest_file = manifest_file or sys.make_path(cfg.root_dir, cfg.manifest_file)
    force_no_cache = force_no_cache or false
    assert(type(manifest_file) == "string", "manifest.get_manifest: Argument 'manifest_file' is not a string.")
    assert(type(force_no_cache) == "boolean", "manifest.get_manifest: Argument 'force_no_cache' is not a boolean.")
    manifest_file = sys.abs_path(manifest_file)

    -- Decide whether the cached manifest can be used as-is.
    local cache_usable = sys.exists(manifest_file)
        and not force_no_cache
        and cfg.cache
        and not utils.cache_timeout_expired(cfg.cache_timeout, manifest_file)

    if not cache_usable then
        local manifest_dest = sys.parent_dir(manifest_file) or sys.current_dir()
        local ok, err = download_manifest(manifest_dest, cfg.repos)
        if not ok then return nil, "Error when downloading manifest: " .. err end
    end

    -- load manifest from cache
    local status, ret = load_manifest(manifest_file)
    if not status then return nil, "Error when loading manifest: " .. ret end
    return ret
end
-- Download manifest from the table of git 'repository_urls' to 'dest_dir' and return true on success
-- and nil and error message on error.
function download_manifest(dest_dir, repository_urls)
dest_dir = dest_dir or sys.make_path(cfg.root_dir, cfg.cache_dir)
repository_urls = repository_urls or cfg.repos
if type(repository_urls) == "string" then repository_urls = {repository_urls} end
assert(type(dest_dir) == "string", "manifest.download_manifest: Argument 'dest_dir' is not a string.")
assert(type(repository_urls) == "table", "manifest.download_manifest: Argument 'repository_urls' is not a table or string.")
dest_dir = sys.abs_path(dest_dir)
-- define used files and directories
local manifest_filename = sys.extract_name(cfg.manifest_file)
local manifest_file = sys.make_path(dest_dir, manifest_filename)
local temp_dir = sys.make_path(cfg.root_dir, cfg.temp_dir)
-- ensure that destination directory exists
local ok, err = sys.make_dir(dest_dir)
if not ok then return nil, err end
-- retrieve manifests from repositories and collect them into one manifest table
local manifest = {}
if #repository_urls == 0 then return nil, "No repository url specified." end
print("Downloading repository information...")
for k, repo in pairs(repository_urls) do
-- Each repository is cloned into its own temporary directory, keyed by
-- its position in 'repository_urls'.
local clone_dir = sys.make_path(temp_dir, "repository_" .. tostring(k))
-- clone the repo and add its '.gitmodules' file to the manifest table
-- Sequence: create empty repo, fetch 'master', checkout its sha. Any
-- failure in the chain aborts the whole download.
ok, err = git.create_repo(clone_dir)
local sha
if ok then sha, err = git.fetch_branch(clone_dir, repo, "master") end
if sha then ok, err = git.checkout_sha(sha, clone_dir) end
if not (ok and sha) then
if not cfg.debug then sys.delete(clone_dir) end
return nil, "Error when downloading the manifest from repository with url: '" .. repo .. "': " .. err
else
-- NOTE(review): load_gitmodules() may return nil (e.g. no .gitmodules
-- in the repo), which would make pairs() error here — TODO confirm
-- every repository is guaranteed to contain a .gitmodules file.
for _, pkg in pairs(load_gitmodules(sys.make_path(clone_dir, ".gitmodules"))) do
table.insert(manifest, pkg)
end
end
-- Temporary clones are kept only in debug mode, for inspection.
if not cfg.debug then sys.delete(clone_dir) end
end
-- save the new manifest table to the file
ok, err = save_manifest(manifest, manifest_file)
if not ok then return nil, err end
return true
end
-- A secure loadfile function
-- If file code chunk has upvalues, the first upvalue is set to the given
-- environement, if that parameter is given, or to the value of the global environment.
-- A secure loadfile function: load 'file' and run it sandboxed in 'env'
-- (or a fresh empty table). Returns pcall's status followed by the chunk's
-- own return values, or nil and an error message if loading failed.
local function secure_loadfile(file, env)
assert(type(file) == "string", "secure_loadfile: Argument 'file' is not a string.")
-- use the given (or create a new) restricted environment
local env = env or {}
-- load the file and run in a protected call with the restricted env
-- setfenv is deprecated in lua 5.2 in favor of giving env in arguments
-- the additional loadfile arguments are simply ignored for previous lua versions
-- (On Lua 5.2+, loadfile(file, mode, env) applies 'env' directly; on 5.1
-- the extra arguments are ignored and the setfenv call below sandboxes.)
local f, err = loadfile(file, 'bt', env)
if f then
-- setfenv exists only on Lua 5.1 and earlier.
if setfenv ~= nil then
setfenv(f, env)
end
return pcall(f)
else
return nil, err
end
end
-- Load and return manifest table from the manifest file.
-- If manifest file not present, return nil.
-- Load the manifest table from 'manifest_file' (default: the configured
-- manifest path). Returns secure_loadfile's results: status plus values.
function load_manifest(manifest_file)
    local path = manifest_file or sys.make_path(cfg.root_dir, cfg.manifest_file)
    return secure_loadfile(sys.abs_path(path))
end
-- Load '.gitmodules' file and returns manifest table.
-- If the file is not present, return nil.
-- Load a '.gitmodules' file and return a manifest table of the packages
-- referenced by git:// URLs in it, or nil and an error message.
function load_gitmodules(gitmodules_file)
    gitmodules_file = gitmodules_file or sys.make_path(cfg.root_dir, cfg.manifest_file)
    assert(type(gitmodules_file) == "string", "manifest.load_gitmodules: Argument 'gitmodules_file' is not a string.")
    gitmodules_file = sys.abs_path(gitmodules_file)

    if not sys.exists(gitmodules_file) then
        return nil, "Error when loading the .gitmodules: file '" .. gitmodules_file .. "' doesn't exist."
    end

    -- load the .gitmodules file
    local file, err = io.open(gitmodules_file, "r")
    if not file then return nil, "Error when opening the .gitmodules file '" .. gitmodules_file .. "':" .. err end
    local mf_text = file:read("*a")
    file:close()
    -- Bug fix: on a read failure the original concatenated 'err', which is
    -- nil at this point (it came from the *successful* io.open), raising an
    -- error instead of returning one.
    if not mf_text then return nil, "Error when reading the .gitmodules file '" .. gitmodules_file .. "'" end

    -- Bug fix: 'manifest' and 'pkg' were accidental globals in the original
    -- (leaking as fields of the module table via module(..., seeall)).
    local manifest = {}
    for url in mf_text:gmatch("git://%S+/%S+") do
        local pkg = {name = url:match("git://%S+/(%S+)%.git") or url:match("git://%S+/(%S+)"), version = "scm", path = url}
        table.insert(manifest, pkg)
    end
    return manifest
end
-- Save manifest table to the 'file'
-- Save 'manifest_table' to 'file' as a loadable Lua chunk.
-- Returns true, or nil and an error message.
function save_manifest(manifest_table, file)
assert(type(manifest_table) == "table", "manifest.save_distinfo: Argument 'manifest_table' is not a table.")
assert(type(file) == "string", "manifest.save_distinfo: Argument 'file' is not a string.")
file = sys.abs_path(file)
-- Print table 'tbl' to io stream 'file'.
-- Serializes keys as ['key'] = and values as long-bracket strings,
-- recursing into nested tables with tab indentation.
local function print_table(file, tbl, in_nested_table)
for k, v in pairs(tbl) do
-- print key
if in_nested_table then file:write("\t\t") end
if type(k) ~= "number" then
-- NOTE(review): a key containing a quote would corrupt the output;
-- assumes manifest keys are plain identifiers — TODO confirm.
file:write("['" .. k .. "']" .. " = ")
end
-- print value
if type(v) == "table" then
file:write("{\n")
print_table(file, v, true)
if in_nested_table then file:write("\t") end
file:write("\t}")
else
if in_nested_table then file:write("\t") end
if type(v) == "string" then
-- NOTE(review): long-bracket quoting breaks for strings containing
-- ']]'; assumes manifest strings never contain it — TODO confirm.
file:write('[[' .. v .. ']]')
else
file:write(tostring(v))
end
end
file:write(",\n")
end
end
local manifest_file = io.open(file, "w")
if not manifest_file then return nil, "Error when saving manifest: cannot open the file '" .. file .. "'." end
-- The written chunk returns two values: the manifest table and 'true'
-- (secure_loadfile wraps execution in pcall, so loaders see status + table).
manifest_file:write('return {\n')
print_table(manifest_file, manifest_table)
manifest_file:write('},\ntrue')
manifest_file:close()
return true
end
-- Load and return package info table from the distinfo_file file.
-- If file not present, return nil.
-- Load the package info table from 'distinfo_file'.
-- Returns the info table, or nil and an error message.
function load_distinfo(distinfo_file)
    assert(type(distinfo_file) == "string", "manifest.load_distinfo: Argument 'distinfo_file' is not a string.")
    local path = sys.abs_path(distinfo_file)
    -- Run the file in an empty sandbox; its global assignments land in
    -- 'distinfo_env', which becomes the result.
    local distinfo_env = {}
    local ok, err = secure_loadfile(path, distinfo_env)
    if not ok then return nil, "Error when loading package info: " .. err end
    return distinfo_env
end
-- Save distinfo table 'distinfo_table' to the file 'file' as a flat
-- 'key = value' listing (nested tables are written inline).
-- Returns true on success, or nil and an error message when the file
-- cannot be opened for writing.
function save_distinfo(distinfo_table, file)
  assert(type(distinfo_table) == "table", "manifest.save_distinfo: Argument 'distinfo_table' is not a table.")
  assert(type(file) == "string", "manifest.save_distinfo: Argument 'file' is not a string.")
  file = sys.abs_path(file)

  -- Serialize table 'tbl' to the io stream 'file'.
  local function print_table(file, tbl, in_nested_table)
    for k, v in pairs(tbl) do
      -- print key (numeric keys are omitted)
      if type(k) ~= "number" then
        file:write(k .. " = ")
      end
      -- print value
      if type(v) == "table" then
        file:write("{\n")
        print_table(file, v, true)
        file:write("}\n")
      elseif type(v) == "string" then
        if in_nested_table then
          -- long-bracket quoting inside nested tables
          file:write('[[' .. v .. ']]')
        else
          file:write('"' .. v .. '"')
        end
      else
        -- BUG FIX: file:write() accepts only strings and numbers, so a
        -- boolean value (e.g. 'true') previously raised a runtime error
        -- here; convert explicitly, as save_manifest already does.
        file:write(tostring(v))
      end
      if in_nested_table then
        file:write(",")
      end
      file:write("\n")
    end
  end

  local distinfo_file = io.open(file, "w")
  if not distinfo_file then return nil, "Error when saving dist-info table: cannot open the file '" .. file .. "'." end
  print_table(distinfo_file, distinfo_table)
  distinfo_file:close()
  return true
end

View File

@ -0,0 +1,596 @@
-- Package functions
module ("dist.package", package.seeall)
local cfg = require "dist.config"
local git = require "dist.git"
local sys = require "dist.sys"
local mf = require "dist.manifest"
local utils = require "dist.utils"
local depends = require "dist.depends"
-- Return whether the package in given 'pkg_dir' is of a source type.
-- A package counts as "source" when its directory carries a top-level
-- CMakeLists.txt (i.e. it must be built before deployment).
function is_source_type(pkg_dir)
  assert(type(pkg_dir) == "string", "package.is_source_type: Argument 'pkg_dir' is not a string.")
  local cmake_lists = sys.make_path(sys.abs_path(pkg_dir), "CMakeLists.txt")
  return utils.to_boolean(sys.exists(cmake_lists))
end
-- Fill in default 'arch' ("Universal") and 'type' ("source") fields of
-- the given source 'dist_info' table (mutating it in place) and return it.
-- WARNING: this function should be used only for 'dist_info' tables of
-- modules that are of a source type!
function ensure_source_arch_and_type(dist_info)
  assert(type(dist_info) == "table", "package.ensure_source_arch_and_type: Argument 'dist_info' is not a table.")
  local defaults = { arch = "Universal", type = "source" }
  for field, default_value in pairs(defaults) do
    dist_info[field] = dist_info[field] or default_value
  end
  return dist_info
end
-- Remove package from 'pkg_distinfo_dir' of 'deploy_dir'.
-- Deletes every file recorded in the package's 'dist.info' for each
-- configured component, prunes emptied parent directories (up to
-- 'deploy_dir', exclusive), removes the dist-info directory itself and,
-- when some components remain, re-saves the pruned 'dist.info'.
-- Returns true on success, or nil and an error message on failure.
function remove_pkg(pkg_distinfo_dir, deploy_dir)
  deploy_dir = deploy_dir or cfg.root_dir
  assert(type(pkg_distinfo_dir) == "string", "package.remove_pkg: Argument 'pkg_distinfo_dir' is not a string.")
  assert(type(deploy_dir) == "string", "package.remove_pkg: Argument 'deploy_dir' is not a string.")
  deploy_dir = sys.abs_path(deploy_dir)

  local abs_pkg_distinfo_dir = sys.make_path(deploy_dir, pkg_distinfo_dir)

  -- check for 'dist.info'
  local info, err = mf.load_distinfo(sys.make_path(abs_pkg_distinfo_dir, "dist.info"))
  if not info then return nil, "Error removing package from '" .. pkg_distinfo_dir .. "' - it doesn't contain valid 'dist.info' file." end
  if not info.files then return nil, "File '" .. sys.make_path(pkg_distinfo_dir, "dist.info") .."' doesn't contain list of installed files." end

  -- remove files installed as components of this package
  for _, component in ipairs(cfg.components) do
    if info.files[component] then
      -- iterate in reverse so files are removed before the directories
      -- that contain them
      for i = #info.files[component], 1, -1 do
        local f = info.files[component][i]
        f = sys.make_path(deploy_dir,f)
        if sys.is_file(f) then
          sys.delete(f)
        elseif sys.is_dir(f) then
          -- only delete a directory when it is empty (it may be shared
          -- with other installed packages)
          local dir_files, err = sys.get_file_list(f)
          if not dir_files then return nil, "Error removing package in '" .. abs_pkg_distinfo_dir .. "': " .. err end
          if #dir_files == 0 then sys.delete(f) end
        end
        -- delete also all parent directories if empty
        local parents = sys.parents_up_to(f, deploy_dir)
        for _, parent in ipairs(parents) do
          if sys.is_dir(parent) then
            local dir_files, err = sys.get_file_list(parent)
            if not dir_files then return nil, "Error removing package in '" .. abs_pkg_distinfo_dir .. "': " .. err end
            if #dir_files == 0 then
              sys.delete(parent)
            end
          end
        end
      end
    end
  end

  -- remove removed components also from 'dist.info'
  for _, component in ipairs(cfg.components) do
    info.files[component] = nil
  end

  -- delete the package information from deploy_dir
  local ok = sys.delete(abs_pkg_distinfo_dir)
  if not ok then return nil, "Error removing package in '" .. abs_pkg_distinfo_dir .. "'." end

  -- if the package was not completely removed (e.g. some components remain),
  -- save the new version of its 'dist.info'
  local comp_num = 0
  for _, _ in pairs(info.files) do comp_num = comp_num + 1 end
  if comp_num ~= 0 then
    sys.make_dir(abs_pkg_distinfo_dir)
    local ok, err = mf.save_distinfo(info, sys.make_path(abs_pkg_distinfo_dir, "dist.info"))
    if not ok then return nil, "Error resaving the 'dist.info': " .. err end
  end
  return ok
end
-- Install package from 'pkg_dir' to 'deploy_dir', using optional CMake 'variables'.
-- Optional 'preserve_pkg_dir' argument specifies whether to preserve the 'pkg_dir'
-- (by default it is deleted after a successful install unless cfg.debug is set).
-- Binary packages are deployed directly; source packages are built with
-- CMake first. Returns a truthy value on success, or nil and an error
-- message on failure.
function install_pkg(pkg_dir, deploy_dir, variables, preserve_pkg_dir)
  deploy_dir = deploy_dir or cfg.root_dir
  variables = variables or {}
  preserve_pkg_dir = preserve_pkg_dir or false
  assert(type(pkg_dir) == "string", "package.install_pkg: Argument 'pkg_dir' is not a string.")
  assert(type(deploy_dir) == "string", "package.install_pkg: Argument 'deploy_dir' is not a string.")
  assert(type(variables) == "table", "package.install_pkg: Argument 'variables' is not a table.")
  assert(type(preserve_pkg_dir) == "boolean", "package.install_pkg: Argument 'preserve_pkg_dir' is not a boolean.")
  pkg_dir = sys.abs_path(pkg_dir)
  deploy_dir = sys.abs_path(deploy_dir)

  -- check for dist.info
  local info, err = mf.load_distinfo(sys.make_path(pkg_dir, "dist.info"))
  if not info then return nil, "Error installing: the directory '" .. pkg_dir .. "' doesn't exist or doesn't contain valid 'dist.info' file." end

  -- check if the package is source
  if is_source_type(pkg_dir) then info = ensure_source_arch_and_type(info) end

  -- check package's architecture
  if not (info.arch == "Universal" or info.arch == cfg.arch) then
    return nil, "Error installing '" .. info.name .. "-" .. info.version .. "': architecture '" .. info.arch .. "' is not suitable for this machine."
  end

  -- check package's type
  if not (info.type == "all" or info.type == "source" or info.type == cfg.type) then
    return nil, "Error installing '" .. info.name .. "-" .. info.version .. "': architecture type '" .. info.type .. "' is not suitable for this machine."
  end

  local ok, err

  -- if package is of binary type, just deploy it
  if info.type ~= "source" then
    ok, err = deploy_binary_pkg(pkg_dir, deploy_dir)
  -- else build and then deploy
  else
    -- check if we have cmake
    ok = utils.system_dependency_available("cmake", "cmake --version")
    if not ok then return nil, "Error when installing: Command 'cmake' not available on the system." end

    -- set cmake variables
    local cmake_variables = {}

    -- set variables from config file
    for k, v in pairs(cfg.variables) do
      cmake_variables[k] = v
    end

    -- set variables specified as argument
    -- (these take precedence over the config-file ones)
    for k, v in pairs(variables) do
      cmake_variables[k] = v
    end

    -- extend CMake search paths with the deployment directory so that
    -- dependencies already deployed there can be found during the build
    cmake_variables.CMAKE_INCLUDE_PATH = table.concat({cmake_variables.CMAKE_INCLUDE_PATH or "", sys.make_path(deploy_dir, "include")}, ";")
    cmake_variables.CMAKE_LIBRARY_PATH = table.concat({cmake_variables.CMAKE_LIBRARY_PATH or "", sys.make_path(deploy_dir, "lib"), sys.make_path(deploy_dir, "bin")}, ";")
    cmake_variables.CMAKE_PROGRAM_PATH = table.concat({cmake_variables.CMAKE_PROGRAM_PATH or "", sys.make_path(deploy_dir, "bin")}, ";")

    -- build the package and deploy it
    ok, err = build_pkg(pkg_dir, deploy_dir, cmake_variables)
    if not ok then return nil, err end
  end

  -- delete directory of fetched package
  if not (cfg.debug or preserve_pkg_dir) then sys.delete(pkg_dir) end
  return ok, err
end
-- Build and deploy package from 'src_dir' to 'deploy_dir' using 'variables'.
-- Return directory to which the package was built or nil on error.
-- 'variables' is table of optional CMake variables.
-- The build is driven through a generated CMake cache script; installed
-- files are collected per component from CMake's install manifests and
-- recorded in the package's 'dist.info'.
function build_pkg(src_dir, deploy_dir, variables)
  deploy_dir = deploy_dir or cfg.root_dir
  variables = variables or {}
  assert(type(src_dir) == "string", "package.build_pkg: Argument 'src_dir' is not a string.")
  assert(type(deploy_dir) == "string", "package.build_pkg: Argument 'deploy_dir' is not a string.")
  assert(type(variables) == "table", "package.build_pkg: Argument 'variables' is not a table.")
  src_dir = sys.abs_path(src_dir)
  deploy_dir = sys.abs_path(deploy_dir)

  -- check for dist.info
  local info, err = mf.load_distinfo(sys.make_path(src_dir, "dist.info"))
  if not info then return nil, "Error building package from '" .. src_dir .. "': it doesn't contain valid 'dist.info' file." end
  local pkg_name = info.name .. "-" .. info.version

  -- set machine information (the built artifact is specific to this host)
  info.arch = cfg.arch
  info.type = cfg.type

  -- create CMake build dir
  local cmake_build_dir = sys.abs_path(sys.make_path(deploy_dir, cfg.temp_dir, pkg_name .. "-CMake-build"))
  sys.make_dir(cmake_build_dir)

  -- create cmake cache
  variables["CMAKE_INSTALL_PREFIX"] = deploy_dir
  local cache_file = io.open(sys.make_path(cmake_build_dir, "cache.cmake"), "w")
  if not cache_file then return nil, "Error creating CMake cache file in '" .. cmake_build_dir .. "'" end

  -- Fill in cache variables (backslashes converted to forward slashes
  -- because CMake treats backslash as an escape character)
  for k,v in pairs(variables) do
    cache_file:write("SET(" .. k .. " " .. sys.quote(v):gsub("\\+", "/") .. " CACHE STRING \"\" FORCE)\n")
  end

  -- If user cache file is provided then append it
  if cfg.cache_file ~= "" then
    local user_cache = io.open(sys.abs_path(cfg.cache_file), "r")
    if user_cache then
      cache_file:write(user_cache:read("*all").."\n")
      user_cache:close()
    end
  end
  cache_file:close()

  src_dir = sys.abs_path(src_dir)
  print("Building " .. sys.extract_name(src_dir) .. "...")

  -- set cmake cache command
  local cache_command = cfg.cache_command
  if cfg.debug then cache_command = cache_command .. " " .. cfg.cache_debug_options end

  -- set cmake build command
  local build_command = cfg.build_command
  if cfg.debug then build_command = build_command .. " " .. cfg.build_debug_options end

  -- set the cmake cache
  local ok = sys.exec("cd " .. sys.quote(cmake_build_dir) .. " && " .. cache_command .. " " .. sys.quote(src_dir))
  if not ok then return nil, "Error preloading the CMake cache script '" .. sys.make_path(cmake_build_dir, "cache.cmake") .. "'" end

  -- build with cmake
  ok = sys.exec("cd " .. sys.quote(cmake_build_dir) .. " && " .. build_command)
  if not ok then return nil, "Error building with CMake in directory '" .. cmake_build_dir .. "'" end

  -- if this is only simulation, exit successfully, skipping the next actions
  if cfg.simulate then
    return true, "Simulated build and deployment of package '" .. pkg_name .. "' sucessfull."
  end

  -- table to collect files installed in the components
  info.files = {}

  -- install the components
  for _, component in ipairs(cfg.components) do
    local strip_option = ""
    -- strip symbols from non-library components in release builds
    if not cfg.debug and component ~= "Library" then strip_option = cfg.strip_option end

    local ok = sys.exec("cd " .. sys.quote(cmake_build_dir) .. " && " .. cfg.cmake .. " " .. strip_option .. " " ..cfg.install_component_command:gsub("#COMPONENT#", component))
    if not ok then return nil, "Error when installing the component '" .. component .. "' with CMake in directory '" .. cmake_build_dir .. "'" end

    local install_mf = sys.make_path(cmake_build_dir, "install_manifest_" .. component .. ".txt")
    local mf, err
    local component_files = {}

    -- collect files installed in this component
    if sys.exists(install_mf) then
      mf, err = io.open(install_mf, "r")
      if not mf then return nil, "Error when opening the CMake installation manifest '" .. install_mf .. "': " .. err end
      for line in mf:lines() do
        line = sys.check_separators(line)
        -- record paths relative to the deployment directory
        local file = line:gsub(utils.escape_magic(deploy_dir .. sys.path_separator()), "")
        table.insert(component_files, file)
      end
      mf:close()
      -- add list of component files to the 'dist.info'
      if #component_files > 0 then info.files[component] = component_files end
    end
  end
  -- if bookmark == 0 then return nil, "Package did not install any files!" end

  -- test with ctest
  if cfg.test then
    print("Testing " .. sys.extract_name(src_dir) .. " ...")
    ok = sys.exec("cd " .. sys.quote(deploy_dir) .. " && " .. cfg.test_command)
    if not ok then return nil, "Error when testing the module '" .. pkg_name .. "' with CTest." end
  end

  -- save modified 'dist.info' file
  local pkg_distinfo_dir = sys.make_path(deploy_dir, cfg.distinfos_dir, pkg_name)
  sys.make_dir(pkg_distinfo_dir)
  ok, err = mf.save_distinfo(info, sys.make_path(pkg_distinfo_dir, "dist.info"))
  if not ok then return nil, err end

  -- clean up
  if not cfg.debug then sys.delete(cmake_build_dir) end

  return true, "Package '" .. pkg_name .. "' successfully builded and deployed to '" .. deploy_dir .. "'."
end
-- Deploy binary package from 'pkg_dir' to 'deploy_dir' by copying.
-- All files listed per component in the package's 'dist.info' are copied
-- into 'deploy_dir', and the 'dist.info' itself is saved to register the
-- module as installed. Returns true plus a message on success, or nil
-- and an error message on failure.
function deploy_binary_pkg(pkg_dir, deploy_dir)
  deploy_dir = deploy_dir or cfg.root_dir
  assert(type(pkg_dir) == "string", "package.deploy_binary_pkg: Argument 'pkg_dir' is not a string.")
  assert(type(deploy_dir) == "string", "package.deploy_binary_pkg: Argument 'deploy_dir' is not a string.")
  pkg_dir = sys.abs_path(pkg_dir)
  deploy_dir = sys.abs_path(deploy_dir)

  -- check for dist.info
  local info, err = mf.load_distinfo(sys.make_path(pkg_dir, "dist.info"))
  if not info then return nil, "Error deploying package from '" .. pkg_dir .. "': it doesn't contain valid 'dist.info' file." end
  local pkg_name = info.name .. "-" .. info.version

  -- if this is only simulation, exit successfully, skipping the next actions
  if cfg.simulate then
    return true, "Simulated deployment of package '" .. pkg_name .. "' sucessfull."
  end

  -- copy all components of the module to the deploy_dir
  for _, component in ipairs(cfg.components) do
    if info.files[component] then
      for _, file in ipairs(info.files[component]) do
        local dest_dir = sys.make_path(deploy_dir, sys.parent_dir(file))
        local ok, err = sys.make_dir(dest_dir)
        if not ok then return nil, "Error when deploying package '" .. pkg_name .. "': cannot create directory '" .. dest_dir .. "': " .. err end
        ok, err = sys.copy(sys.make_path(pkg_dir, file), dest_dir)
        if not ok then return nil, "Error when deploying package '" .. pkg_name .. "': cannot copy file '" .. file .. "' to the directory '" .. dest_dir .. "': " .. err end
      end
    end
  end

  -- copy dist.info to register the module as installed
  local pkg_distinfo_dir = sys.make_path(deploy_dir, cfg.distinfos_dir, pkg_name)
  sys.make_dir(pkg_distinfo_dir)
  -- BUG FIX: 'ok' and 'err' were previously assigned without 'local',
  -- leaking them as accidental globals (the loop-scoped locals above
  -- are not visible here).
  local ok, err = mf.save_distinfo(info, sys.make_path(pkg_distinfo_dir, "dist.info"))
  if not ok then return nil, err end

  return true, "Package '" .. pkg_name .. "' successfully deployed to '" .. deploy_dir .. "'."
end
-- Fetch package (table 'pkg') to download_dir. Return the original 'pkg' table
-- with 'pkg.download_dir' containing path to the directory of the
-- downloaded package.
--
-- When optional 'suppress_printing' parameter is set to true, then messages
-- for the user won't be printed during run of this function.
--
-- If the 'pkg' already contains the information about download directory (pkg.download_dir),
-- we assume the package was already downloaded there and won't download it again.
--
-- The fetch prefers a prebuilt binary tag ('version-arch-type') when
-- cfg.binary allows it; otherwise it fetches the source tag matching
-- 'pkg.version' (or the 'master' branch for the 'scm' version).
function fetch_pkg(pkg, download_dir, suppress_printing)
  download_dir = download_dir or sys.current_dir()
  suppress_printing = suppress_printing or false
  assert(type(pkg) == "table", "package.fetch_pkg: Argument 'pkg' is not a table.")
  assert(type(download_dir) == "string", "package.fetch_pkg: Argument 'download_dir' is not a string.")
  assert(type(suppress_printing) == "boolean", "package.fetch_pkg: Argument 'suppress_printing' is not a boolean.")
  assert(type(pkg.name) == "string", "package.fetch_pkg: Argument 'pkg.name' is not a string.")
  assert(type(pkg.version) == "string", "package.fetch_pkg: Argument 'pkg.version' is not a string.")

  -- if the package is already downloaded don't download it again
  if pkg.download_dir then return pkg end
  assert(type(pkg.path) == "string", "package.fetch_pkg: Argument 'pkg.path' is not a string.")
  download_dir = sys.abs_path(download_dir)

  local pkg_full_name = pkg.name .. "-" .. pkg.version
  local repo_url = pkg.path
  local clone_dir = sys.abs_path(sys.make_path(download_dir, pkg_full_name))
  pkg.download_dir = clone_dir

  -- check if download_dir already exists, assuming the package was already downloaded
  if sys.exists(sys.make_path(clone_dir, "dist.info")) then
    if cfg.cache and not utils.cache_timeout_expired(cfg.cache_timeout, clone_dir) then
      if not suppress_printing then print("'" .. pkg_full_name .. "' already in cache, skipping downloading (use '-cache=false' to force download).") end
      return pkg
    else
      -- stale cache entry: remove it and re-download below
      sys.delete(sys.make_path(clone_dir))
    end
  end

  -- tag under which a prebuilt binary for this arch/type would be published
  local bin_tag = pkg.version .. "-" .. cfg.arch .. "-" .. cfg.type
  local use_binary = false

  if cfg.binary then
    -- check if binary version of the module for this arch & type available
    local avail_tags, err = git.get_remote_tags(repo_url)
    if not avail_tags then return nil, err end

    if utils.contains(avail_tags, bin_tag) then
      use_binary = true
    end
  end

  -- init the git repository
  local ok, err = git.create_repo(clone_dir)
  if not ok then return nil, err end

  -- Fetch the desired ref (from the pkg's remote repo) and checkout into it.
  if use_binary then
    if not suppress_printing then print("Getting " .. pkg_full_name .. " (binary)...") end

    -- We fetch the binary tag.
    local sha
    if ok then sha, err = git.fetch_tag(clone_dir, repo_url, bin_tag) end
    if sha then ok, err = git.checkout_sha(sha, clone_dir) end
  elseif cfg.source then
    if not suppress_printing then print("Getting " .. pkg_full_name .. " (source)...") end

    -- If we want the 'scm' version, we fetch the 'master' branch, otherwise
    -- we fetch the tag, matching the desired package version.
    if ok and pkg.version ~= "scm" then
      local sha
      sha, err = git.fetch_tag(clone_dir, repo_url, pkg.version)
      if sha then ok, err = git.checkout_sha(sha, clone_dir) end
    elseif ok then
      local sha
      sha, err = git.fetch_branch(clone_dir, repo_url, "master")
      if sha then ok, err = git.checkout_sha(sha, clone_dir) end
    end
  else
    -- neither binary nor source fetching is allowed by configuration
    ok = false
    if cfg.binary then
      err = "Binary version of module not available and using source modules disabled."
    else
      err = "Using both binary and source modules disabled."
    end
  end

  if not ok then
    -- clean up
    if not cfg.debug then sys.delete(clone_dir) end
    return nil, "Error fetching package '" .. pkg_full_name .. "' from '" .. pkg.path .. "' to '" .. download_dir .. "': " .. err
  end

  -- delete '.git' directory (the clone is only used as a file snapshot)
  if not cfg.debug then sys.delete(sys.make_path(clone_dir, ".git")) end
  return pkg
end
-- Return table with information about available versions of 'package'.
-- 'package' is a "name[-constraint]" string; the returned table holds one
-- {name, version, path} entry per matching non-binary tag of the package's
-- remote repository (or the already-downloaded package itself, when
-- 'download_dir' is set on it). Returns nil and an error message on failure.
--
-- When optional 'suppress_printing' parameter is set to true, then messages
-- for the user won't be printed during run of this function.
function retrieve_versions(package, manifest, suppress_printing)
  suppress_printing = suppress_printing or false
  -- BUG FIX: the first assertion message previously referred to the
  -- argument as 'string' instead of 'package'.
  assert(type(package) == "string", "package.retrieve_versions: Argument 'package' is not a string.")
  assert(type(manifest) == "table", "package.retrieve_versions: Argument 'manifest' is not a table.")
  assert(type(suppress_printing) == "boolean", "package.retrieve_versions: Argument 'suppress_printing' is not a boolean.")

  -- get package table
  local pkg_name = depends.split_name_constraint(package)
  local tmp_packages = depends.find_packages(pkg_name, manifest)

  if #tmp_packages == 0 then
    return nil, "No suitable candidate for package '" .. package .. "' found."
  else
    package = tmp_packages[1]
  end

  -- if the package's already downloaded, we assume it's desired to install the downloaded version
  if package.download_dir then
    local pkg_type = "binary"
    if is_source_type(package.download_dir) then pkg_type = "source" end
    if not suppress_printing then print("Using " .. package.name .. "-" .. package.version .. " (" .. pkg_type .. ") provided by " .. package.download_dir) end
    return {package}
  end

  if not suppress_printing then print("Finding out available versions of " .. package.name .. "...") end

  -- get available versions
  local tags, err = git.get_remote_tags(package.path)
  if not tags then return nil, "Error when retrieving versions of package '" .. package.name .. "': " .. err end

  -- filter out tags of binary packages (binary tags carry two dashes)
  local versions = utils.filter(tags, function (tag) return tag:match("^[^%-]+%-?[^%-]*$") and true end)

  -- BUG FIX: 'packages' and 'pkg' were previously assigned without
  -- 'local', leaking them as accidental globals.
  local packages = {}

  -- create package information
  for _, version in pairs(versions) do
    local pkg = {}
    pkg.name = package.name
    pkg.version = version
    pkg.path = package.path
    table.insert(packages, pkg)
  end

  return packages
end
-- Return table with information from package's dist.info and path to downloaded
-- package. Optional argument 'deploy_dir' is used just as a temporary
-- place to place the downloaded packages into.
--
-- When optional 'suppress_printing' parameter is set to true, then messages
-- for the user won't be printed during the execution of this function.
--
-- Returns the info table and the download directory on success, or nil
-- and an error message on failure.
function retrieve_pkg_info(package, deploy_dir, suppress_printing)
  deploy_dir = deploy_dir or cfg.root_dir
  assert(type(package) == "table", "package.retrieve_pkg_info: Argument 'package' is not a table.")
  assert(type(deploy_dir) == "string", "package.retrieve_pkg_info: Argument 'deploy_dir' is not a string.")
  deploy_dir = sys.abs_path(deploy_dir)

  local tmp_dir = sys.abs_path(sys.make_path(deploy_dir, cfg.temp_dir))

  -- download the package
  local fetched_pkg, err = fetch_pkg(package, tmp_dir, suppress_printing)
  if not fetched_pkg then return nil, "Error when retrieving the info about '" .. package.name .. "': " .. err end

  -- load information from 'dist.info'
  local info, err = mf.load_distinfo(sys.make_path(fetched_pkg.download_dir, "dist.info"))
  if not info then return nil, err end

  -- add other attributes carried on the input 'package' table
  if package.path then info.path = package.path end
  if package.was_scm_version then info.was_scm_version = package.was_scm_version end

  -- set default arch/type if not explicitly stated and package is of source type
  if is_source_type(fetched_pkg.download_dir) then
    info = ensure_source_arch_and_type(info)
  elseif not (info.arch and info.type) then
    -- a binary package must declare its arch and type explicitly
    return nil, fetched_pkg.download_dir .. ": binary package missing arch or type in 'dist.info'."
  end

  return info, fetched_pkg.download_dir
end
-- Return manifest, augmented with info about all available versions
-- of package 'pkg'. Optional argument 'deploy_dir' is used just as a temporary
-- place to place the downloaded packages into.
-- Optional argument 'installed' is manifest of all installed packages. When
-- specified, info from installed packages won't be downloaded from repo,
-- but the dist.info from installed package will be used.
-- Returns a deep copy of 'manifest' with per-version entries replaced or
-- appended, or nil and an error message on failure.
function get_versions_info(pkg, manifest, deploy_dir, installed)
  deploy_dir = deploy_dir or cfg.root_dir
  assert(type(pkg) == "string", "package.get_versions_info: Argument 'pkg' is not a string.")
  assert(type(manifest) == "table", "package.get_versions_info: Argument 'manifest' is not a table.")
  assert(type(deploy_dir) == "string", "package.get_versions_info: Argument 'deploy_dir' is not a string.")
  deploy_dir = sys.abs_path(deploy_dir)

  -- find all available versions of package
  local versions, err = retrieve_versions(pkg, manifest)
  if not versions then return nil, err end

  -- collect info about all retrieved versions
  local infos = {}
  for _, version in pairs(versions) do
    local info, path_or_err
    local installed_version = {}

    -- find out whether this 'version' is installed so we can use it's dist.info
    if type(installed) == "table" then installed_version = depends.find_packages(version.name .. "-" .. version.version, installed) end

    -- get info
    if #installed_version > 0 then
      print("Using dist.info from installed " .. version.name .. "-" .. version.version)
      info = installed_version[1]
      info.path = version.path
      info.from_installed = true -- flag that dist.info of installed package was used
    else
      info, path_or_err = retrieve_pkg_info(version, deploy_dir)
      if not info then return nil, path_or_err end
      -- the downloaded copy is only needed for its dist.info
      sys.delete(path_or_err)
    end
    table.insert(infos, info)
  end

  -- found and add an implicit 'scm' version
  local pkg_name = depends.split_name_constraint(pkg)
  local found = depends.find_packages(pkg_name, manifest)
  if #found == 0 then return nil, "No suitable candidate for package '" .. pkg .. "' found." end
  local scm_info, path_or_err = retrieve_pkg_info({name = pkg_name, version = "scm", path = found[1].path})
  if not scm_info then return nil, path_or_err end
  sys.delete(path_or_err)

  scm_info.version = "scm"
  table.insert(infos, scm_info)

  local tmp_manifest = utils.deepcopy(manifest)

  -- add collected info to the temp. manifest, replacing existing tables
  for _, info in pairs(infos) do
    local already_in_manifest = false
    -- find if this version is already in manifest
    for idx, pkg in ipairs(tmp_manifest) do
      -- if yes, replace it
      if pkg.name == info.name and pkg.version == info.version then
        tmp_manifest[idx] = info
        already_in_manifest = true
        break
      end
    end
    -- if not, just normally add to the manifest
    if not already_in_manifest then
      table.insert(tmp_manifest, info)
    end
  end

  return tmp_manifest
end

View File

@ -0,0 +1,386 @@
-- System functions
module ("dist.sys", package.seeall)
local cfg = require "dist.config"
local utils = require "dist.utils"
local lfs = require "lfs"
-- Return the platform's directory separator: backslash on Windows,
-- forward slash everywhere else.
function path_separator()
  -- ternary idiom is safe here: "\\" is truthy
  return (cfg.arch == "Windows") and "\\" or "/"
end
-- Normalize path separators: on Windows, forward slashes are replaced
-- with backslashes; elsewhere the path is returned unchanged.
function check_separators(path)
  assert(type(path) == "string", "sys.check_separators: Argument 'path' is not a string.")
  if cfg.arch ~= "Windows" then return path end
  -- note: gsub's extra return value (replacement count) is propagated,
  -- matching the original behavior
  return path:gsub("/", "\\")
end
-- Strip a single trailing separator from 'path', unless the path is a
-- filesystem root (where the separator is significant).
function remove_trailing(path)
  assert(type(path) == "string", "sys.remove_trailing: Argument 'path' is not a string.")
  local ends_with_sep = path:sub(-1) == path_separator()
  if ends_with_sep and not is_root(path) then
    return path:sub(1, -2)
  end
  return path
end
-- Return the path with all occurrences of '/./' (or '\.\' on Windows),
-- representing the current directory, removed, along with a trailing '/.'.
function remove_curr_dir_dots(path)
  assert(type(path) == "string", "sys.remove_curr_dir_dots: Argument 'path' is not a string.")
  local sep = path_separator()
  local dot_component = sep .. "%." .. sep   -- pattern for "/./"
  -- loop because gsub's replacements can create new overlapping matches
  -- (e.g. "/././" collapses in two passes)
  while path:match(dot_component) do
    path = path:gsub(dot_component, sep)
  end
  -- finally drop a trailing "/."
  return path:gsub(sep .. "%.$", "")
end
-- Return string argument quoted for a command line usage.
-- On Windows, path-like arguments get their forward slashes normalized to
-- backslashes, and backslashes are doubled (except the leading one of a
-- separator-initial path, which Windows treats specially); embedded double
-- quotes are escaped and the whole string is wrapped in double quotes.
function quote(argument)
  assert(type(argument) == "string", "sys.quote: Argument 'argument' is not a string.")

  -- TODO: This seems like a not very nice hack. Why is it needed?
  -- Wouldn't it be better to fix the problem where it originates?
  -- replace '/' path separators for '\' on Windows
  if cfg.arch == "Windows" and argument:match("^[%u%U.]?:?[/\\].*") then
    argument = argument:gsub("//","\\"):gsub("/","\\")
  end

  -- Windows doesn't recognize paths starting with two slashes or backslashes
  -- so we double every backslash except for the first one
  if cfg.arch == "Windows" and argument:match("^[/\\].*") then
    local prefix = argument:sub(1,1)
    argument = argument:sub(2):gsub("\\", "\\\\")
    argument = prefix .. argument
  else
    argument = argument:gsub("\\", "\\\\")
  end
  argument = argument:gsub('"', '\\"')
  return '"' .. argument .. '"'
end
-- Run the system command (in current directory).
-- Return true on success, nil on fail and log string.
-- When optional 'force_verbose' parameter is true, then the output will be shown
-- even when not in debug or verbose mode.
function exec(command, force_verbose)
  force_verbose = force_verbose or false
  assert(type(command) == "string", "sys.exec: Argument 'command' is not a string.")
  assert(type(force_verbose) == "boolean", "sys.exec: Argument 'force_verbose' is not a boolean.")

  -- silence the command's output unless verbose/debug output is requested
  if not (cfg.verbose or cfg.debug or force_verbose) then
    if cfg.arch == "Windows" then
      command = command .. " > NUL 2>&1"
    else
      command = command .. " > /dev/null 2>&1"
    end
  end

  if cfg.debug then print("Executing the command: " .. command) end
  local ok, str, status = os.execute(command)

  -- os.execute returned values on failure are:
  -- nil or true, "exit", n or true, "signal", n for lua >= 5.2
  -- status ~= 0 for lua 5.x < 5.2
  -- (the condition below handles both calling conventions)
  if ok == nil or (str == "exit" and status ~= 0) or str == "signal" or (ok ~= 0 and ok ~= true) then
    return nil, "Error when running the command: " .. command
  else
    return true, "Sucessfully executed the command: " .. command
  end
end
-- Execute the 'command' through io.popen and return its standard output
-- as a single string. Returns nil and an error message on failure.
function capture_output(command)
  -- BUG FIX: the assertion message previously said 'sys.exec'.
  assert(type(command) == "string", "sys.capture_output: Argument 'command' is not a string.")

  local executed, err = io.popen(command, "r")
  if not executed then return nil, "Error running the command '" .. command .. "':" .. err end

  local captured, read_err = executed:read("*a")
  if not captured then
    -- BUG FIX: close the pipe handle on read failure instead of leaking it
    executed:close()
    return nil, "Error reading the output of command '" .. command .. "':" .. read_err
  end

  executed:close()
  return captured
end
-- Return whether 'path' denotes a filesystem root
-- (a drive root like "C:\" or a bare separator like "/").
function is_root(path)
  assert(type(path) == "string", "sys.is_root: Argument 'path' is not a string.")
  local drive_root = path:find("^[a-zA-Z]:[/\\]$")
  local bare_root = path:find("^[/\\]$")
  return utils.to_boolean(drive_root or bare_root)
end
-- Return whether 'path' is absolute
-- (starts with a drive prefix like "C:\" or a separator).
function is_abs(path)
  assert(type(path) == "string", "sys.is_abs: Argument 'path' is not a string.")
  local drive_prefixed = path:find("^[a-zA-Z]:[/\\].*$")
  local sep_prefixed = path:find("^[/\\].*$")
  return utils.to_boolean(drive_prefixed or sep_prefixed)
end
-- Return whether the specified file or directory exists.
-- The second return value is lfs's error message when it does not.
function exists(path)
  assert(type(path) == "string", "sys.exists: Argument 'path' is not a string.")
  local attributes, err = lfs.attributes(path)
  return utils.to_boolean(attributes), err
end
-- Return whether 'file' exists and is a regular file.
function is_file(file)
  assert(type(file) == "string", "sys.is_file: Argument 'file' is not a string.")
  local mode = lfs.attributes(file, "mode")
  return mode == "file"
end
-- Return whether 'dir' exists and is a directory.
function is_dir(dir)
  assert(type(dir) == "string", "sys.is_dir: Argument 'dir' is not a string.")
  local mode = lfs.attributes(dir, "mode")
  return mode == "directory"
end
-- Return the current working directory, or nil and an error message.
function current_dir()
  local dir, err = lfs.currentdir()
  if dir then
    return dir
  end
  return nil, err
end
-- Return an iterator over the directory 'dir' (defaulting to the
-- current working directory).
-- If 'dir' doesn't exist or is not a directory, return nil and an
-- error message.
function get_directory(dir)
  dir = dir or current_dir()
  assert(type(dir) == "string", "sys.get_directory: Argument 'dir' is not a string.")
  if not is_dir(dir) then
    return nil, "Error: '" .. dir .. "' is not a directory."
  end
  -- lfs.dir's extra return values are propagated intentionally
  return lfs.dir(dir)
end
-- Extract the file or directory name (last path component) from 'path'.
-- A root path is returned unchanged.
function extract_name(path)
  assert(type(path) == "string", "sys.extract_name: Argument 'path' is not a string.")
  if is_root(path) then return path end
  local trimmed = remove_trailing(path)
  -- greedily strip everything up to the last separator; the extra
  -- parentheses truncate gsub to a single return value
  return (trimmed:gsub("^.*" .. path_separator(), ""))
end
-- Return parent directory of the 'path' or nil if there's no parent directory.
-- If 'path' is a path to file, return the directory the file is in.
function parent_dir(path)
  assert(type(path) == "string", "sys.parent_dir: Argument 'path' is not a string.")
  path = remove_curr_dir_dots(path)
  path = remove_trailing(path)

  -- strip the last path component; escape_magic protects any Lua-pattern
  -- magic characters that may appear in the component's name
  local dir = path:gsub(utils.escape_magic(extract_name(path)) .. "$", "")
  if dir == "" then
    return nil
  else
    -- make_path normalizes separators and trailing slashes
    return make_path(dir)
  end
end
-- Return a table of all parent directories of 'path' up to the directory
-- specified by 'boundary_path' (exclusive), nearest parent first.
function parents_up_to(path, boundary_path)
  assert(type(path) == "string", "sys.parents_up_to: Argument 'path' is not a string.")
  assert(type(boundary_path) == "string", "sys.parents_up_to: Argument 'boundary_path' is not a string.")
  boundary_path = remove_trailing(boundary_path)

  -- walk upwards iteratively, stopping at the boundary or the root
  -- (where parent_dir returns nil)
  local parents = {}
  local current = parent_dir(path)
  while current and current ~= boundary_path do
    parents[#parents + 1] = current
    current = parent_dir(current)
  end
  return parents
end
-- Compose a path from the specified parts, or return the current
-- working directory when no part is given. Repeated separators are
-- collapsed and an unnecessary trailing separator is removed.
function make_path(...)
  -- mimic table.pack (Lua 5.2+) so trailing nils are counted correctly
  local parts = { n = select("#", ...), ... }
  local path, err
  if parts.n == 0 then
    path, err = current_dir()
  else
    path = table.concat(parts, path_separator())
  end
  if not path then return nil, err end

  -- squeeze repeated occurrences of the separator
  local sep = path_separator()
  path = path:gsub(sep .. "+", sep)
  -- drop an unnecessary trailing separator
  return remove_trailing(path)
end
-- Return absolute path from 'path'.
-- Relative paths are resolved against the current working directory;
-- returns nil and an error message when the working directory is unavailable.
function abs_path(path)
    assert(type(path) == "string", "sys.get_abs_path: Argument 'path' is not a string.")
    if is_abs(path) then return path end
    local cur_dir, err = current_dir()
    if not cur_dir then return nil, err end
    return make_path(cur_dir, path)
end
-- Returns path to the temporary directory of OS.
-- Checks the conventional environment variables in order and falls back
-- to "/tmp" when none of them is set.
function tmp_dir()
    for _, var in ipairs({"TMPDIR", "TEMP", "TMP"}) do
        local dir = os.getenv(var)
        if dir then return dir end
    end
    return "/tmp"
end
-- Returns temporary file (or directory) path (with optional prefix).
-- The name is randomized to make collisions unlikely; nothing is created
-- on disk by this function.
function tmp_name(prefix)
    prefix = prefix or ""
    assert(type(prefix) == "string", "sys.tmp_name: Argument 'prefix' is not a string.")
    return make_path(tmp_dir(), prefix .. "luadist_" .. utils.rand(10000000000))
end
-- Return table of all paths in 'dir', recursively, relative to 'dir'.
-- Both files and (non-dot) directories are listed; returns nil and an
-- error message when 'dir' doesn't exist.
function get_file_list(dir)
    dir = dir or current_dir()
    -- Fixed: the assert message previously referred to 'sys.get_directory'
    -- (copy-paste error), which misattributed failures to the wrong function.
    assert(type(dir) == "string", "sys.get_file_list: Argument 'dir' is not a string.")
    if not exists(dir) then return nil, "Error getting file list of '" .. dir .. "': directory doesn't exist." end
    -- Recursively walk 'path', appending dir-relative paths to 'all_paths'.
    local function collect(path, all_paths)
        for item in get_directory(path) do
            local item_path = make_path(path, item)
            -- Strip the leading 'dir' prefix (plain find, no pattern magic).
            local _, last = item_path:find(dir .. path_separator(), 1, true)
            local path_to_insert = item_path:sub(last + 1)
            if is_file(item_path) then
                table.insert(all_paths, path_to_insert)
            elseif is_dir(item_path) and item ~= "." and item ~= ".." then
                table.insert(all_paths, path_to_insert)
                collect(item_path, all_paths)
            end
        end
    end
    local all_paths = {}
    collect(dir, all_paths)
    return all_paths
end
-- Return time of the last modification of 'file' (seconds since epoch),
-- or nil plus an error message when the file is not accessible.
function last_modification_time(file)
    assert(type(file) == "string", "sys.last_modification_time: Argument 'file' is not a string.")
    return lfs.attributes(file, "modification")
end
-- Return the current time (in seconds since epoch).
function current_time()
    local now = os.time()
    return now
end
-- Change the current working directory and return 'true' and previous working
-- directory on success and 'nil' and error message on error.
function change_dir(dir_name)
    assert(type(dir_name) == "string", "sys.change_dir: Argument 'dir_name' is not a string.")
    -- Remember the old directory so the caller can restore it later.
    local prev_dir = current_dir()
    local ok, err = lfs.chdir(dir_name)
    if ok then
        return ok, prev_dir
    else
        return nil, err
    end
end
-- Make a new directory, making also all of its parent directories that don't exist.
-- Returns true when the directory exists or was created; otherwise forwards
-- the nil + error message from lfs.mkdir.
function make_dir(dir_name)
    assert(type(dir_name) == "string", "sys.make_dir: Argument 'dir_name' is not a string.")
    if exists(dir_name) then
        return true
    else
        -- Recursively ensure the parent directory exists first.
        local par_dir = parent_dir(dir_name)
        if par_dir then
            local ok, err = make_dir(par_dir)
            if not ok then return nil, err end
        end
        return lfs.mkdir(dir_name)
    end
end
-- Move file (or directory) to the destination directory.
-- Keeps the original name; forwards os.rename's results (true on success,
-- nil + error message otherwise).
function move_to(file_or_dir, dest_dir)
    assert(type(file_or_dir) == "string", "sys.move_to: Argument 'file_or_dir' is not a string.")
    assert(type(dest_dir) == "string", "sys.move_to: Argument 'dest_dir' is not a string.")
    assert(is_dir(dest_dir), "sys.move_to: Destination '" .. dest_dir .."' is not a directory.")
    -- Extract file/dir name from its path
    local file_or_dir_name = extract_name(file_or_dir)
    return os.rename(file_or_dir, make_path(dest_dir, file_or_dir_name))
end
-- Rename file (or directory) to the new name.
-- Forwards os.rename's results (true on success, nil + error message otherwise).
function rename(file, new_name)
    assert(type(file) == "string", "sys.rename: Argument 'file' is not a string.")
    assert(type(new_name) == "string", "sys.rename: Argument 'new_name' is not a string.")
    -- Refuse to clobber an existing file or directory.
    assert(not exists(new_name), "sys.rename: desired filename already exists.")
    return os.rename(file, new_name)
end
-- Copy 'source' to the destination directory 'dest_dir'.
-- If 'source' is a directory, then recursive copying is used.
-- For non-recursive copying of directories use the make_dir() function.
-- Delegates to platform shell tools: xcopy/copy on Windows, cp elsewhere.
function copy(source, dest_dir)
    assert(type(source) == "string", "sys.copy: Argument 'file_or_dir' is not a string.")
    assert(type(dest_dir) == "string", "sys.copy: Argument 'dest_dir' is not a string.")
    assert(is_dir(dest_dir), "sys.copy: destination '" .. dest_dir .."' is not a directory.")
    if cfg.arch == "Windows" then
        if is_dir(source) then
            -- Create the target directory first, then copy the tree into it.
            make_dir(make_path(dest_dir, extract_name(source)))
            return exec("xcopy /E /I /Y /Q " .. quote(source) .. " " .. quote(dest_dir .. "\\" .. extract_name(source)))
        else
            return exec("copy /Y " .. quote(source) .. " " .. quote(dest_dir))
        end
    else
        if is_dir(source) then
            -- -R recursive, -f overwrite, -H follow command-line symlinks.
            return exec("cp -fRH " .. quote(source) .. " " .. quote(dest_dir))
        else
            return exec("cp -fH " .. quote(source) .. " " .. quote(dest_dir))
        end
    end
end
-- Delete the specified file or directory (recursively).
-- Only absolute paths are accepted (enforced by the assert below).
function delete(path)
    assert(type(path) == "string", "sys.delete: Argument 'path' is not a string.")
    assert(is_abs(path), "sys.delete: Argument 'path' is not an absolute path.")
    if cfg.arch == "Windows" then
        if not exists(path) then
            return true
        elseif is_file(path) then
            return os.remove(path)
        else
            -- rd /S /Q: remove the directory tree, recursive and quiet.
            return exec("rd /S /Q " .. quote(path))
        end
    else
        return exec("rm -rf " .. quote(path))
    end
end

View File

@ -0,0 +1,151 @@
-- Utility functions (dist.utils).
module ("dist.utils", package.seeall)
local sys = require "dist.sys"
-- Returns a deep copy of 'object' with reference to the same metadata table.
-- Handles cyclic structures: a table visited twice maps to the same copy.
-- Source: http://lua-users.org/wiki/CopyTable
function deepcopy(object)
    local seen = {}
    local function clone(value)
        if type(value) ~= "table" then
            return value
        end
        if seen[value] then
            return seen[value]
        end
        local copy = {}
        seen[value] = copy
        for k, v in pairs(value) do
            copy[clone(k)] = clone(v)
        end
        return setmetatable(copy, getmetatable(value))
    end
    return clone(object)
end
-- Return deep copy of table 'array', containing only items for which
-- 'predicate_fn' returns true (strict boolean true, not merely truthy).
function filter(array, predicate_fn)
    assert(type(array) == "table", "utils.filter: Argument 'array' is not a table.")
    assert(type(predicate_fn) == "function", "utils.filter: Argument 'predicate_fn' is not a function.")
    local kept = {}
    for _, item in pairs(array) do
        if predicate_fn(item) == true then
            kept[#kept + 1] = deepcopy(item)
        end
    end
    return kept
end
-- Return deep copy of table 'array', sorted according to the 'compare_fn'
-- function. The input table is left untouched.
function sort(array, compare_fn)
    assert(type(array) == "table", "utils.sort: Argument 'array' is not a table.")
    assert(type(compare_fn) == "function", "utils.sort: Argument 'compare_fn' is not a function.")
    local result = deepcopy(array)
    table.sort(result, compare_fn)
    return result
end
-- Return whether the 'value' is among the values of table 'tbl'.
function contains(tbl, value)
    assert(type(tbl) == "table", "utils.contains: Argument 'tbl' is not a table.")
    for _, item in pairs(tbl) do
        if item == value then
            return true
        end
    end
    return false
end
-- Return single line string consisting of values in 'tbl' separated by comma,
-- recursing into nested tables (whose key becomes the '[label]' suffix).
-- Used for printing the dependencies/provides/conflicts.
function table_tostring(tbl, label)
    assert(type(tbl) == "table", "utils.table_tostring: Argument 'tbl' is not a table.")
    local pieces = {}
    for k, v in pairs(tbl) do
        if type(v) == "table" then
            pieces[#pieces + 1] = table_tostring(v, k)
        elseif label ~= nil then
            pieces[#pieces + 1] = tostring(v) .. " [" .. tostring(label) .. "]" .. ", "
        else
            pieces[#pieces + 1] = tostring(v) .. ", "
        end
    end
    return table.concat(pieces)
end
-- Return table made up from values of the string, separated by separator.
-- NOTE(review): 'separator' is used directly inside a Lua pattern; magic
-- characters (e.g. '.' or '%') are not escaped — verify callers only pass
-- plain separators.
function make_table(str, separator)
    assert(type(str) == "string", "utils.make_table: Argument 'str' is not a string.")
    assert(type(separator) == "string", "utils.make_table: Argument 'separator' is not a string.")
    local result = {}
    for piece in str:gmatch("(.-)" .. separator) do
        result[#result + 1] = piece
    end
    -- Whatever remains after the last separator is the final value.
    local tail = str:gsub(".-" .. separator, "")
    if tail ~= "" then
        result[#result + 1] = tail
    end
    return result
end
-- Return whether the 'cache_timeout' (seconds) for 'file' has expired,
-- i.e. the file's last modification is older than 'cache_timeout' seconds.
function cache_timeout_expired(cache_timeout, file)
    assert(type(cache_timeout) == "number", "utils.cache_timeout_expired: Argument 'cache_timeout' is not a number.")
    assert(type(file) == "string", "utils.cache_timeout_expired: Argument 'file' is not a string.")
    -- NOTE(review): raises if 'file' doesn't exist (last_modification_time
    -- returns nil and the '+' fails) — presumably callers check existence
    -- first; verify.
    return sys.last_modification_time(file) + cache_timeout < sys.current_time()
end
-- Return the string 'str', with all magic (pattern) characters escaped
-- by prefixing them with '%', so the result matches 'str' literally.
function escape_magic(str)
    assert(type(str) == "string", "utils.escape: Argument 'str' is not a string.")
    return (str:gsub('[%-%.%+%[%]%(%)%^%%%?%*%^%$]','%%%1'))
end
-- Return the boolean representation of an 'arg'
-- (false for nil/false, true for everything else).
function to_boolean(arg)
    if arg then
        return true
    end
    return false
end
-- Seed the generator once at module load so rand() varies between runs.
math.randomseed(os.time())
-- Return pseudo-random number in range [0, 1], [1, n] or [n, m]
-- (mirrors math.random's argument conventions).
function rand(...)
    return math.random(...)
end
-- Perform check of system dependency, which isn't provided in the LuaDist
-- installation itself and if it is missing, print instructions how
-- to install it. The 'command' is used for testing, 'name' when printing
-- information to the user. Returns true when the command succeeds,
-- false otherwise.
function system_dependency_available(name, command)
    assert(type(name) == "string", "utils.system_dependency_available: Argument 'name' is not a string.")
    assert(type(command) == "string", "utils.system_dependency_available: Argument 'command' is not a string.")
    if not sys.exec(command) then
        print("Error: command '" .. name .. "' not found on system. See installation instructions at\nhttps://github.com/LuaDist/Repository/wiki/Installation-of-System-Dependencies")
        return false
    end
    return true
end
-- Obtain LuaDist location by checking available package locations.
-- Scans package.path for an absolute '<root>/lib/lua/?.lua' template and
-- returns the cleaned-up '<root>' directory, or nil when none is found.
function get_luadist_location()
    -- Split package.path into its ';'-separated templates.
    -- (Fixed: the original misused gsub with a side-effecting callback and
    -- bound its unused result to a dead local.)
    local paths = {}
    for template in package.path:gmatch("[^;]+") do
        table.insert(paths, template)
    end
    for _, path in pairs(paths) do
        if (sys.is_abs(path) and path:find("[/\\]lib[/\\]lua[/\\]%?.lua$")) then
            -- Remove path to lib/lua
            path = path:gsub("[/\\]lib[/\\]lua[/\\]%?.lua$", "")
            -- Clean the path up a bit
            path = path:gsub("[/\\]bin[/\\]%.[/\\]%.%.", "")
            path = path:gsub("[/\\]bin[/\\]%.%.", "")
            return path
        end
    end
    return nil
end

View File

@ -0,0 +1,5 @@
require 'git.util'
require 'git.objects'
require 'git.pack'
require 'git.repo'
require 'git.protocol'

View File

@ -0,0 +1,121 @@
local util = require 'git.util'
local assert, next, io, print, os, type, string, pairs, tostring =
assert, next, io, print, os, type, string, pairs, tostring
local join_path = git.util.join_path
local require = require
local isPosix = package.config:sub(1,1) == '/' -- wild guess
module(...)
-- A commit object; instances are created by Repo:commit() with fields
-- id, repo, tree_sha, parents, author, committer and message.
Commit = {}
Commit.__index = Commit
-- Return the Tree object this commit points to.
function Commit:tree()
    return self.repo:tree(self.tree_sha)
end
-- Check out the commit's whole tree under directory 'path'.
function Commit:checkout(path)
    assert(path, 'path argument missing')
    self:tree():checkoutTo(path)
end
-- A tree (directory) object; instances are created by Repo:tree().
Tree = {}
-- Method lookup first; unknown keys fall through to named entry lookup,
-- so 'tree.subdir' resolves the entry called "subdir".
Tree.__index = function (t,k)
    if Tree[k] then return Tree[k] end
    return t:entry(k)
end
-- Return an iterator yielding (name, type, object) for every entry of
-- this tree. 'object' is a Tree for subtree entries, a Blob (with its
-- 'mode' field set) for blob entries, and the raw entry table for commit
-- entries (submodules), which are deliberately not resolved here.
function Tree:entries()
    return function(t, n)
        local n, entry = next(t, n)
        if entry then
            local object
            if entry.type == 'tree' then
                object = self.repo:tree(entry.id)
            elseif entry.type == 'blob' then
                object = self.repo:blob(entry.id)
                object.mode = entry.mode
            elseif entry.type == 'commit' then
                -- this is possibly a commit in a submodule,
                -- do not retrieve it from current repo
                object = entry
            else
                error('Unknown entry type: ' .. entry.type)
            end
            return n, entry.type, object
        end
    end, self._entries
end
-- Return the entry named 'n' materialized as a Tree, Commit or Blob
-- object, or nil when no such entry exists.
function Tree:entry(n)
    local e = self._entries[n]
    if not e then return end
    -- Dispatch to the matching Repo loader by entry type.
    local loaders = { tree = 'tree', commit = 'commit', blob = 'blob' }
    local method = loaders[e.type]
    if not method then
        error('Unknown entry type: ' .. e.type)
    end
    return self.repo[method](self.repo, e.id)
end
-- Depth-first traversal: calls 'func(entry, entry_path, type)' for every
-- entry reachable from this tree, descending into subtrees. 'path' is the
-- prefix used to build entry paths (defaults to '.').
function Tree:walk(func, path)
    path = path or '.'
    assert(type(func) == "function", "argument is not a function")
    local function visit(tree, prefix)
        for name, entry_type, entry in tree:entries() do
            local entry_path = join_path(prefix, name)
            func(entry, entry_path, entry_type)
            if entry_type == "tree" then
                visit(entry, entry_path)
            end
        end
    end
    visit(self, path)
end
-- Write the contents of this tree (recursively) into directory 'path':
-- subtrees become directories, blobs become files (with their unix mode
-- restored on POSIX systems), submodule commits become empty directories.
function Tree:checkoutTo(path)
    util.make_dir(path)
    self:walk(function (entry, entry_path, type)
        if type == 'tree' then
            util.make_dir(entry_path)
        elseif type == 'blob' then
            local out = assert(io.open(entry_path, 'wb'))
            out:write(entry:content())
            out:close()
            if isPosix then
                local mode = entry.mode:sub(-3,-1) -- fixme: is this ok?
                local cmd = 'chmod '..mode..' "'..entry_path..'"'
                os.execute(cmd)
            end
        elseif type == 'commit' then
            -- this is a submodule referencing a commit,
            -- make a directory for it
            util.make_dir(entry_path)
        else
            -- Fixed: error() was previously called as
            -- error('Unknown entry type: ', type) — the type was passed as
            -- the error *level* argument instead of being part of the message.
            error('Unknown entry type: ' .. tostring(type))
        end
    end, path)
end
-- A blob (file contents) object; instances are created by Repo:blob().
Blob = {}
Blob.__index = Blob
-- Return the blob's data as a string. Stored blobs are re-read from the
-- repository on every call; in-memory blobs return their 'data' field.
function Blob:content()
    if self.stored then
        local f = self.repo:raw_object(self.id)
        local ret = f:read('*a') or ""
        f:close()
        return ret
    else
        return self.data
    end
end

View File

@ -0,0 +1,316 @@
local io = io
local core = require 'git.core'
local assert, pcall, print, select, setmetatable, string, type, unpack =
assert, pcall, print, select, setmetatable, string, type, unpack
local ord = string.byte
local fmt = string.format
local concat, insert = table.concat, table.insert
local band = core.band
local rshift, lshift = core.rshift, core.lshift
local to_hex = git.util.to_hex
local from_hex = git.util.from_hex
local object_sha = git.util.object_sha
local binary_sha = git.util.binary_sha
local readable_sha = git.util.readable_sha
local tmpfile = git.util.tmpfile
local reader = git.util.reader
module(...)
-- read git/Documentation/technical/pack-format.txt for some inspiration
-- Maps numeric object types from pack headers to names:
-- 1 = commit, 2 = tree, 3 = blob, 4 = tag, 6 = ofs_delta, 7 = ref_delta
-- (5 is unused, hence the '???' placeholder).
local types = {'commit', 'tree', 'blob', 'tag', '???', 'ofs_delta', 'ref_delta'}
-- read a 4 byte unsigned integer stored in network (big-endian) order
local function read_int(f)
    local bytes = f:read(4)
    local b1, b2, b3, b4 = bytes:byte(1, 4)
    return ((b1 * 256 + b2) * 256 + b3) * 256 + b4
end
-- read in the type and file length
-- Variable-length header: the first byte carries the object type in bits
-- 4-6 and the low 4 bits of the length; while the continuation bit (0x80)
-- is set, each following byte contributes 7 more length bits.
local function read_object_header(f)
    local b = ord(f:read(1))
    local type = band(rshift(b, 4), 0x7)
    local len = band(b, 0xF)
    local ofs = 0
    while band(b, 0x80) ~= 0 do
        b = ord(f:read(1))
        len = len + lshift(band(b, 0x7F), ofs * 7 + 4)
        ofs = ofs + 1
    end
    return len, type
end
-- reads in the delta header and returns the offset where original data is stored
-- Big-endian base-128 encoding with an implicit +1 added for every
-- continuation byte (the 'offset + 1' below), as used by ofs_delta entries.
local function read_delta_header(f)
    local b = ord(f:read(1))
    local offset = band(b, 0x7F)
    while band(b, 0x80) ~= 0 do
        offset = offset + 1
        b = ord(f:read(1))
        offset = lshift(offset, 7) + band(b, 0x7F)
    end
    return offset
end
-- read just enough of file `f` to uncompress `size` bytes
-- Feeds fixed-size slices into a zlib inflate stream until end-of-stream,
-- then repositions `f` right after the compressed data.
-- NOTE(review): `size` is not used to bound the read — the function stops
-- at the zlib end-of-stream marker instead; confirm this is intended.
local function uncompress_by_len(f, size)
    local z = core.inflate()
    local chunks = {}
    local CHUNK_SIZE = 1024
    local curr_pos = f:seek()
    local inflated, eof, total
    -- read until end of zlib-compressed stream
    while not eof do
        local data = f:read(CHUNK_SIZE)
        inflated, eof, total = z(data)
        insert(chunks, inflated)
    end
    -- repair the current position in stream ('total' as reported by the
    -- inflate stream is the number of compressed bytes consumed)
    f:seek('set', curr_pos + total)
    return concat(chunks)
end
-- uncompress the object from the current location in `f`
-- (thin wrapper that forwards the declared length and numeric type)
local function unpack_object(f, len, type)
    local data = uncompress_by_len(f, len)
    return data, len, type
end
-- returns a size value encoded in delta data
-- (little-endian base-128: 7 payload bits per byte, 0x80 = continuation)
local function delta_size(f)
    local size = 0
    local i = 0
    repeat
        local b = ord(f:read(1))
        size = size + lshift(band(b, 0x7F), i)
        i = i + 7
    until band(b, 0x80) == 0
    return size
end
-- returns a patched object from string `base` according to `delta` data
-- Git delta format: source size, target size, then a stream of commands —
-- high bit set means "copy a slice of base", otherwise "insert the next
-- `cmd` literal bytes". Both sizes are validated against the header values.
local function patch_object(base, delta, base_type)
    -- insert delta codes into temporary file
    local df = reader(delta)
    -- retrieve original and result size (for checks)
    local orig_size = delta_size(df)
    assert(#base == orig_size, fmt('#base(%d) ~= orig_size(%d)', #base, orig_size))
    local result_size = delta_size(df)
    local size = result_size
    local result = {}
    -- process the delta codes
    local cmd = df:read(1)
    while cmd do
        cmd = ord(cmd)
        if cmd == 0 then
            error('unexpected delta code 0')
        elseif band(cmd, 0x80) ~= 0 then -- copy a selected part of base data
            local cp_off, cp_size = 0, 0
            -- retrieve offset (each set flag bit adds one little-endian byte)
            if band(cmd, 0x01) ~= 0 then cp_off = ord(df:read(1)) end
            if band(cmd, 0x02) ~= 0 then cp_off = cp_off + ord(df:read(1))*256 end
            if band(cmd, 0x04) ~= 0 then cp_off = cp_off + ord(df:read(1))*256^2 end
            if band(cmd, 0x08) ~= 0 then cp_off = cp_off + ord(df:read(1))*256^3 end
            -- retrieve size
            if band(cmd, 0x10) ~= 0 then cp_size = ord(df:read(1)) end
            if band(cmd, 0x20) ~= 0 then cp_size = cp_size + ord(df:read(1))*256 end
            if band(cmd, 0x40) ~= 0 then cp_size = cp_size + ord(df:read(1))*256^2 end
            if cp_size == 0 then cp_size = 0x10000 end
            -- stop on out-of-range copy commands (malformed delta)
            if cp_off + cp_size > #base or cp_size > size then break end
            -- get the data and append it to result
            local data = base:sub(cp_off + 1, cp_off + cp_size)
            insert(result, data)
            size = size - cp_size
        else -- insert new data
            if cmd > size then break end
            local data = df:read(cmd)
            insert(result, data)
            size = size - cmd
        end
        cmd = df:read(1)
    end
    df:close()
    result = concat(result)
    assert(#result == result_size, fmt('#result(%d) ~= result_size(%d)', #result, result_size))
    return result, result_size, base_type
end
-- Represents an open .pack file together with its sha->offset index.
Pack = {}
Pack.__index = Pack
-- read an object from the current location in pack, or from a specific `offset`
-- if specified. Returns the uncompressed data, its length and numeric type.
-- Deltified entries (ofs_delta/ref_delta) are resolved recursively against
-- their base object unless `ignore_data` is set (used when scanning the
-- pack for offsets only; then nothing is returned for delta entries).
function Pack:read_object(offset, ignore_data)
    local f = self.pack_file
    if offset then
        f:seek('set', offset)
    end
    local curr_pos = f:seek()
    local len, type = read_object_header(f)
    if type < 5 then -- commit, tree, blob, tag
        return unpack_object(f, len, type)
    elseif type == 6 then -- ofs_delta
        local offset = read_delta_header(f)
        local delta_data = uncompress_by_len(f, len)
        if not ignore_data then
            -- the offset is negative from the current location
            local base, base_len, base_type = self:read_object(curr_pos - offset)
            return patch_object(base, delta_data, base_type)
        end
    elseif type == 7 then -- ref_delta
        local sha = f:read(20)
        local delta_data = uncompress_by_len(f, len)
        if not ignore_data then
            -- lookup the object in the pack by sha
            -- FIXME: maybe lookup in repo/other packs
            local base_offset = self.index[binary_sha(sha)]
            local base, base_len, base_type = self:read_object(base_offset)
            return patch_object(base, delta_data, base_type)
        end
    else
        error('unknown object type: '..type)
    end
end
-- returns true if this pack contains the given object
-- (the index is keyed by the binary form of the sha)
function Pack:has_object(sha)
    return self.index[binary_sha(sha)] ~= nil
end
-- if the object name `sha` exists in the pack, returns a temporary file with the
-- object content, length and type, otherwise returns nil
function Pack:get_object(sha)
    local offset = self.index[binary_sha(sha)]
    if not offset then
        print('!!! Failed to find object', readable_sha(sha))
        -- Fixed: previously execution fell through with a nil offset, making
        -- read_object() decode from the file's current position (garbage)
        -- instead of honoring the documented "otherwise returns nil" contract.
        return nil
    end
    local data, len, type = self:read_object(offset)
    -- (A leftover debug print of the whole object data was removed here.)
    local f = tmpfile()
    f:write(data)
    f:seek('set', 0)
    return f, len, types[type]
end
-- Store every object contained in this pack as a loose object in `repo`.
function Pack:unpack(repo)
    for i=1, self.nobjects do
        local offset = self.offsets[i]
        local data, len, type = self:read_object(offset)
        repo:store_object(data, len, types[type])
    end
end
-- parses the index (.idx version 2) accompanying a pack file and builds
-- self.index, a lookup table from binary sha to pack offset
function Pack:parse_index(index_file)
    local f = index_file
    local head = f:read(4)
    assert(head == '\255tOc', "Incorrect header: " .. head)
    local version = read_int(f)
    assert(version == 2, "Incorrect version: " .. version)
    -- first the fanout table (how many objects are in the index, whose
    -- first byte is below or equal to i)
    local fanout = {}
    for i=0, 255 do
        local nobjs = read_int(f)
        fanout[i] = nobjs
    end
    -- the last element in fanout is the number of all objects in index
    local count = fanout[255]
    -- then come the sorted object names (=sha hash)
    local tmp = {}
    for i=1,count do
        local sha = f:read(20)
        tmp[i] = { sha = sha }
    end
    -- then the CRCs (assume ok, skip them)
    for i=1, count do
        local crc = f:read(4)
    end
    -- then come the offsets - read just the 32bit ones, does not handle packs > 2G
    for i=1, count do
        local offset = read_int(f)
        tmp[i].offset = offset
    end
    -- construct the lookup table
    local lookup = {}
    for i=1, count do
        lookup[tmp[i].sha] = tmp[i].offset
    end
    self.index = lookup
end
-- constructs the index/offsets if the index file is missing,
-- by decoding every object in the pack and hashing it
function Pack:construct_index(path)
    local index = {}
    for i=1, self.nobjects do
        local offset = self.offsets[i]
        local data, len, type = self:read_object(offset)
        local sha = object_sha(data, len, types[type])
        index[binary_sha(sha)] = offset
    end
    self.index = index
end
-- Close the underlying pack file handle.
function Pack:close()
    self.pack_file:close()
end
-- Open the pack at `path`, scan it to record each object's offset, and
-- load the sha->offset index from the matching .idx file (reconstructing
-- it from the pack itself when the .idx file is missing).
function Pack.open(path)
    local fp = assert(io.open(path, 'rb')) -- stays open
    -- read the pack header
    local head = fp:read(4)
    assert(head == 'PACK', "Incorrect header: " .. head)
    local version = read_int(fp)
    assert(version == 2, "Incorrect version: " .. version)
    local nobj = read_int(fp)
    local pack = setmetatable({
        offsets = {},
        nobjects = nobj,
        pack_file = fp,
    }, Pack)
    -- fill the offsets by traversing through the pack
    for i=1,nobj do
        pack.offsets[i] = fp:seek()
        -- ignore the object data, we only need the offset in the pack
        pack:read_object(nil, true)
    end
    -- read the index
    local fi = io.open((path:gsub('%.pack$', '.idx')), 'rb')
    if fi then
        pack:parse_index(fi)
        fi:close()
    else
        pack:construct_index(path)
    end
    return pack
end
return Pack

View File

@ -0,0 +1,188 @@
local socket = require 'socket'
local urllib = require 'socket.url'
local lfs = require 'lfs'
local Repo = git.repo.Repo
local Pack = git.pack.Pack
local join_path = git.util.join_path
local parent_dir = git.util.parent_dir
local make_dir = git.util.make_dir
local correct_separators = git.util.correct_separators
local assert, error, getmetatable, io, os, pairs, print, require, string, tonumber =
assert, error, getmetatable, io, os, pairs, print, require, string, tonumber
local _VERSION, newproxy = _VERSION, newproxy
module(...)
-- Standard git daemon port.
local GIT_PORT = 9418
-- Open a TCP connection to `host` and return a small wrapper speaking the
-- git pkt-line framing: every packet is a 4-hex-digit length (which
-- includes those 4 bytes) followed by the payload; '0000' is a flush
-- packet (send(nil) emits it, receive() signals it by returning nil).
local function git_connect(host)
    local sock = assert(socket.connect(host, GIT_PORT))
    local gitsocket = {}
    function gitsocket:send(data)
        if not data then -- flush packet
            sock:send('0000')
        else
            local len = #data + 4
            len = string.format("%04x", len)
            assert(sock:send(len .. data))
        end
    end
    function gitsocket:receive()
        local len = assert(sock:receive(4))
        len = tonumber(len, 16)
        if len == 0 then return end -- flush packet
        local data = assert(sock:receive(len - 4))
        return data
    end
    function gitsocket:close()
        sock:close()
    end
    return gitsocket
end
-- Attach `finalizer` to run when `object` is garbage-collected.
-- On Lua 5.1 only userdata can have __gc, so a newproxy userdata carrying
-- the finalizer is stored in the object to tie their lifetimes together.
local function addFinalizer(object, finalizer)
    if _VERSION <= "Lua 5.1" then
        local gc = newproxy(true)
        getmetatable(gc).__gc = finalizer
        object.__gc = gc
    else
        -- NOTE(review): on Lua 5.2+ a table is only marked for finalization
        -- when setmetatable() is called with __gc already present; assigning
        -- __gc into a pre-existing metatable (first branch) may not register
        -- the finalizer — verify on the targeted Lua versions.
        local mt = getmetatable(object)
        if mt then mt.__gc = finalizer
        else setmetatable(object, {__gc = finalizer})
        end
    end
end
-- Run the git fetch protocol against `host`:`path`.
-- Negotiates refs, requests the wanted objects (depth 1), receives the
-- side-band-multiplexed pack stream into a temporary .pack file and opens
-- it as a Pack. When `repo` is given the pack is unpacked into it and the
-- requested `head` ref is written out. Returns the Pack and the sha of
-- the requested head (nil when no specific head was asked for).
local function git_fetch(host, path, repo, head, supress_progress)
    local s = git_connect(host)
    s:send('git-upload-pack '..path..'\0host='..host..'\0')
    -- Read the server's ref advertisement: one "<sha> <refname>" per packet.
    local refs, refsbyname = {}, {}
    repeat
        local ref = s:receive()
        if ref then
            local sha, name = ref:sub(1,40), ref:sub(42, -2)
            refs[sha] = name
            refsbyname[name] = sha
        end
    until not ref
    local wantedSha
    local headsha = head and refsbyname[head]
    for sha, ref in pairs(refs) do
        -- we implicitly want this ref
        local wantObject = true
        -- unless we ask for a specific head
        if headsha then
            if sha ~= headsha then
                wantObject = false
            else
                wantedSha = sha
            end
        end
        -- or we already have it
        if repo and repo:has_object(sha) then
            wantObject = false
        end
        if wantObject then
            s:send('want '..sha..' multi_ack_detailed side-band-64k ofs-delta\n')
        end
    end
    if head and not wantedSha then
        error("Server does not have "..head)
    end
    -- Shallow fetch: request only one level of history.
    s:send('deepen 1')
    s:send()
    -- drain the server's responses up to the flush packet
    while s:receive() do end
    s:send('done\n')
    assert(s:receive() == "NAK\n")
    -- Receive the pack data into a temporary file.
    local packname = os.tmpname() .. '.pack'
    local packfile = assert(io.open(packname, 'wb'))
    repeat
        local got = s:receive()
        if got then
            -- get sideband channel, 1=pack data, 2=progress, 3=error
            local cmd = string.byte(got:sub(1,1))
            local data = got:sub(2)
            if cmd == 1 then
                packfile:write(data)
            elseif cmd == 2 then
                if not supress_progress then io.write(data) end
            else
                error(data)
            end
        end
    until not got
    packfile:close()
    s:close()
    local pack = Pack.open(packname)
    if repo then
        pack:unpack(repo)
        repo.isShallow = true
        if wantedSha then
            -- Record the fetched head ref inside the repository.
            local headfile = correct_separators(join_path(repo.dir, head))
            assert(make_dir(parent_dir(headfile)))
            local f = assert(io.open(headfile, 'wb'))
            f:write(wantedSha)
            f:close()
        end
    end
    -- Remove the temporary pack file once the Pack is collected.
    addFinalizer(pack, function()
        os.remove(packname)
    end)
    return pack, wantedSha
end
-- Fetch `head` from the git URL `url` into `repo` (both optional).
-- Returns the downloaded Pack and the sha of the requested head, if any.
-- Only the git:// scheme is supported.
function fetch(url, repo, head, supress_progress)
    if repo then assert(getmetatable(repo) == Repo, "arg #2 is not a repository") end
    url = urllib.parse(url)
    if url.scheme == 'git' then
        local pack, sha = git_fetch(url.host, url.path, repo, head, supress_progress)
        return pack, sha
    else
        error('unsupported scheme: '..url.scheme)
    end
end
-- List the refs advertised by the remote at git URL `url`.
-- Returns a table mapping ref name -> sha. Only git:// is supported.
function remotes(url)
    -- TODO: refactor common code (ref advertisement parsing shared with git_fetch)
    url = assert(urllib.parse(url))
    if url.scheme ~= 'git' then
        error('unsupported scheme: '..url.scheme)
    end
    local host, path = url.host, url.path
    local s = git_connect(host)
    s:send('git-upload-pack '..path..'\0host='..host..'\0')
    local remote = {}
    repeat
        local ref = s:receive()
        if ref then
            local sha, name = ref:sub(1,40), ref:sub(42, -2)
            remote[name] = sha
        end
    until not ref
    s:close()
    return remote
end

View File

@ -0,0 +1,283 @@
local util = require 'git.util'
local objects = require 'git.objects'
local core = require 'git.core'
local pack = require 'git.pack'
local join_path = util.join_path
local decompressed = util.decompressed
local read_until_nul = util.read_until_nul
local to_hex = util.to_hex
local object_sha = util.object_sha
local readable_sha = util.readable_sha
local deflate = core.deflate
local lfs = require 'lfs'
local assert, error, io, ipairs, print, os, setmetatable, string, table =
assert, error, io, ipairs, print, os, setmetatable, string, table
module(...)
-- Represents an opened git repository (.git directory plus its packs).
Repo = {}
Repo.__index = Repo
-- retrieves an object identified by `sha` from the repository or its packs
-- returns a file-like object (supports 'read', 'seek' and 'close'), the size
-- of the object and its type
-- errors when the object does not exist
function Repo:raw_object(sha)
    -- first, look in 'objects' directory
    -- the first two sha characters name the directory, the rest the file
    sha = readable_sha(sha)
    local dir = sha:sub(1,2)
    local file = sha:sub(3)
    local path = join_path(self.dir, 'objects', dir, file)
    if not lfs.attributes(path, 'size') then
        -- then, try to look in packs
        for _, pack in ipairs(self.packs) do
            local obj, len, typ = pack:get_object(sha)
            if obj then
                return obj, len, typ
            end
        end
        error('Object not found in object neither in packs: '..sha)
    else
        -- the objects are zlib compressed
        local f = decompressed(path)
        -- retrieve the type and length - <type> SP <len> \0 <data...>
        local content = read_until_nul(f)
        local typ, len = content:match('(%w+) (%d+)')
        return f, len, typ
    end
end
--- Store a new object into the repository in `objects` directory.
-- @param data A string containing the contents of the new file.
-- @param len The length of the data.
-- @param type One of 'commit', 'blob', 'tree', 'tag'
function Repo:store_object(data, len, type)
    local sha = readable_sha(object_sha(data, len, type))
    local dir = sha:sub(1,2)
    local file = sha:sub(3)
    util.make_dir(join_path(self.dir, 'objects', dir))
    local path = join_path(self.dir, 'objects', dir, file)
    local fo = assert(io.open(path, 'wb'))
    -- loose object layout: "<type> <len>\0<data>", zlib-deflated
    local header = type .. ' ' .. len .. '\0'
    local compressed = deflate()(header .. data, "finish")
    fo:write(compressed)
    fo:close()
end
-- Scan the tag object stream `f` for its "object <sha>" line and return
-- that sha (or nil when no such line exists). Closes `f` when done.
local function resolvetag(f)
    local sha
    repeat
        local line = f:read()
        if not line then break end
        sha = line:match('^object (%x+)$')
    until sha
    f:close()
    return sha
end
-- Load the commit identified by `sha`, dereferencing annotated tags
-- transparently. Parses the headers (tree/parent/author/committer) and
-- the message, and returns a Commit object.
function Repo:commit(sha)
    local f, len, typ = self:raw_object(sha)
    -- Peel tags until an actual commit is reached.
    while typ == 'tag' do
        sha = assert(resolvetag(f), 'could not parse tag for '..readable_sha(sha))
        f, len, typ = self:raw_object(sha)
    end
    assert(typ == 'commit', string.format('%s (%s) is not a commit', sha, typ))
    local commit = { id = sha, repo = self, stored = true, parents = {} }
    repeat
        local line = f:read()
        if not line then break end
        -- Header lines are "<word> <rest>"; split on the first space.
        local space = line:find(' ') or 0
        local word = line:sub(1, space - 1)
        local afterSpace = line:sub(space + 1)
        if word == 'tree' then
            commit.tree_sha = afterSpace
        elseif word == 'parent' then
            table.insert(commit.parents, afterSpace)
        elseif word == 'author' then
            commit.author = afterSpace
        elseif word == 'committer' then
            commit.committer = afterSpace
        elseif commit.message then
            -- Past the blank separator: accumulate message lines.
            table.insert(commit.message, line)
        elseif line == '' then
            -- Blank line separates the headers from the commit message.
            commit.message = {}
        end
    until false -- ends with break
    f:close()
    -- NOTE(review): concat errors if no blank separator line was seen
    -- (commit.message left nil) — presumably well-formed commits always
    -- contain one; verify against malformed input.
    commit.message = table.concat(commit.message, '\n')
    return setmetatable(commit, objects.Commit)
end
-- Load the tree object `sha` and parse its entries. Each raw entry is
-- "<mode> <name>\0" followed by a 20-byte binary sha; mode 40000 marks a
-- subtree, 160000 a submodule commit, anything else is treated as a blob.
function Repo:tree(sha)
    local f, len, typ = self:raw_object(sha)
    assert(typ == 'tree', string.format('%s (%s) is not a tree', sha, typ))
    local tree = { id = sha, repo = self, stored = true, _entries = {} }
    while true do
        local info = read_until_nul(f)
        if not info then break end
        local entry_sha = to_hex(f:read(20))
        local mode, name = info:match('^(%d+)%s(.+)$')
        local entry_type = 'blob'
        if mode == '40000' then
            entry_type = 'tree'
        elseif mode == '160000' then
            entry_type = 'commit'
        end
        tree._entries[name] = { mode = mode, id = entry_sha, type = entry_type }
    end
    f:close()
    return setmetatable(tree, objects.Tree)
end
-- retrieves a Blob
-- (only metadata is kept here; content is re-read lazily by Blob:content())
function Repo:blob(sha)
    local f, len, typ = self:raw_object(sha)
    f:close() -- can be reopened in Blob:content()
    assert(typ == 'blob', string.format('%s (%s) is not a blob', sha, typ))
    return setmetatable({
        id = sha,
        len = len,
        repo = self,
        stored = true }, objects.Blob)
end
-- Return the Commit object the HEAD ref currently points to.
function Repo:head()
    return self:commit(self.refs.HEAD)
end
-- Return true when object `sha` exists either as a loose object or in
-- any of the repository's opened packs.
function Repo:has_object(sha)
    local dir = sha:sub(1,2)
    local file = sha:sub(3)
    local path = join_path(self.dir, 'objects', dir, file)
    if lfs.attributes(path, 'size') then return true end
    for _, pack in ipairs(self.packs) do
        local has = pack:has_object(sha)
        if has then return true end
    end
    return false
end
-- Check out commit `sha` into directory `target`
-- (defaults to the repository's working directory).
function Repo:checkout(sha, target)
    if not target then target = self.workDir end
    assert(target, 'target directory not specified')
    local commit = self:commit(sha)
    commit:checkout(target)
    -- if the repo was checked out using the deepen command (one level of history only)
    -- mark the commit's parent as shallow, that is it has no history
    if self.isShallow then
        -- if it has a parent, mark it shallow
        if commit.parents[1] then
            local f = assert(io.open(self.dir .. '/shallow', "w"))
            f:write(commit.parents[1], '\n')
            f:close()
        end
    end
end
-- Close every pack file held open by this repository.
function Repo:close()
    for _, pack in ipairs(self.packs) do
        pack:close()
    end
end
-- Create an empty repository directory layout under `dir` (a '.git'
-- component is appended unless already present) and return a Repo for it.
function create(dir)
    if not dir:match('%.git.?$') then
        dir = join_path(dir, '.git')
    end
    -- Standard empty-repository directory skeleton.
    util.make_dir(dir)
    util.make_dir(dir .. '/branches')
    util.make_dir(dir .. '/hooks')
    util.make_dir(dir .. '/info')
    util.make_dir(dir .. '/objects/info')
    util.make_dir(dir .. '/objects/pack')
    util.make_dir(dir .. '/refs/heads')
    util.make_dir(dir .. '/refs/tags')
    util.make_dir(dir .. '/refs/remotes')
    do
        -- HEAD starts as a symbolic ref to the (yet unborn) master branch.
        local f = assert(io.open(dir .. "/HEAD", "w"))
        f:write("ref: refs/heads/master\n")
        f:close()
    end
    local refs = {}
    local packs = {}
    return setmetatable({
        dir = dir,
        refs = refs,
        packs = packs,
    }, Repo)
end
-- opens a repository located in working directory `dir` or directly a .git repo
-- Loads loose refs under refs/heads and refs/tags, opens all pack files
-- and resolves the symbolic HEAD ref.
function open(dir)
    local workDir = dir
    if not dir:match('%.git.?$') then
        dir = join_path(dir, '.git')
    else
        workDir = nil -- no working directory, working directly with repo
    end
    -- Load loose refs into refs['refs/heads/...'] = sha.
    local refs = {}
    for _,d in ipairs{'refs/heads', 'refs/tags'} do
        for fn in lfs.dir(join_path(dir, d)) do
            if fn ~= '.' and fn ~= '..' then
                local path = join_path(dir, d, fn)
                -- Fixed: previously written as assert(io.open(path), 'rb'),
                -- which passed 'rb' as the assert message instead of the
                -- io.open mode (files were opened in the default text mode).
                local f = assert(io.open(path, 'rb'))
                local ref = f:read()
                refs[join_path(d, fn)] = ref
                f:close()
            end
        end
    end
    -- Open every pack under objects/pack.
    local packs = {}
    for fn in lfs.dir(join_path(dir, 'objects/pack')) do
        if fn:match('%.pack$') then
            local path = join_path(dir, 'objects/pack', fn)
            table.insert(packs, pack.open(path))
        end
    end
    -- Resolve the symbolic HEAD ref to a sha, when present.
    local head = io.open(join_path(dir, 'HEAD'), 'rb')
    if head then
        local src = head:read()
        local HEAD = src:match('ref: (.-)$')
        refs.HEAD = refs[HEAD]
        head:close()
    end
    return setmetatable({
        dir = dir,
        workDir = workDir,
        refs = refs,
        packs = packs,
    }, Repo)
end
return Repo

View File

@ -0,0 +1,233 @@
local lfs = require 'lfs'
local core = require 'git.core'
local deflate = core.deflate
local inflate = core.inflate
local sha = core.sha
module(..., package.seeall)
local BUF_SIZE = 4096
local dirsep = package.config:sub(1,1)
-- replaces '/' path separators on Windows with the correct ones ('\\')
-- (note: like gsub, this also returns the substitution count as a
-- second result)
function correct_separators(path)
  return path:gsub('/', dirsep)
end
-- Joins path components into one path using the system-specific
-- directory separator, first stripping one trailing separator from
-- each component, e.g. join_path('a/', 'b', 'c/') => 'a/b/c'.
function join_path(...)
  local count = select('#', ...)
  local parts = {...}
  for i = 1, count do
    parts[i] = parts[i]:gsub(dirsep..'?$', '')
  end
  return table.concat(parts, dirsep, 1, count)
end
-- Return the path with all occurrences of '/.' or '\.' (representing
-- the current directory) removed, e.g. 'a/./b' => 'a/b'.
local function remove_curr_dir_dots(path)
  local dot_component = dirsep .. "%." .. dirsep   -- matches "/./"
  while path:match(dot_component) do
    path = path:gsub(dot_component, dirsep)
  end
  return path:gsub(dirsep .. "%.$", "")   -- strip a trailing "/."
end
-- Return whether the path is a root such as '/' or 'C:\' (a truthy
-- find() result, or nil).
-- NOTE(review): the class [%u%U.] matches any single character (upper,
-- non-upper, or '.'), so strings like '@:/' also pass -- presumably a
-- drive letter was intended; confirm before tightening the pattern.
local function is_root(path)
  return path:find("^[%u%U.]?:?[/\\]$")
end
-- Return the path with a single unnecessary trailing separator
-- removed; root paths are left untouched.
local function remove_trailing(path)
  if path:sub(-1) ~= dirsep or is_root(path) then
    return path
  end
  return path:sub(1, -2)
end
-- Extract the last component (file or directory name) from `path`;
-- root paths are returned unchanged.
local function extract_name(path)
  if is_root(path) then
    return path
  end
  path = remove_trailing(path)
  return (path:gsub("^.*" .. dirsep, ""))
end
-- Return the string 'str' with every Lua pattern magic character
-- prefixed by '%', so it can be used as a literal inside a pattern.
local function escape_magic(str)
  return (str:gsub('[%-%.%+%[%]%(%)%^%%%?%*%^%$]', '%%%1'))
end
-- Return the parent directory of 'path', or nil if there is no parent.
-- For a path to a file, return the directory the file is in.
function parent_dir(path)
  path = remove_trailing(remove_curr_dir_dots(path))
  local dir = path:gsub(escape_magic(extract_name(path)) .. "$", "")
  if dir == "" then
    return nil
  end
  return remove_trailing(dir)
end
-- Make a new directory, creating any missing parent directories first
-- (like `mkdir -p`); returns true when the path already exists.
function make_dir(path)
  if lfs.attributes(path) then
    return true
  end
  local parent = parent_dir(path)
  if parent then
    assert(make_dir(parent))
  end
  return lfs.mkdir(path)
end
-- Reader class: a file-like object that reads from an in-memory string
-- adapted from Penlight: https://raw.github.com/stevedonovan/Penlight/master/lua/pl/stringio.lua
local SR = {}
SR.__index = SR

-- Reads a single item in format `fmt` ('*l', '*L', '*a', '*n' or a
-- byte count), advancing the cursor; returns nil, "past end of file"
-- once the data is exhausted.
function SR:_read(fmt)
  local i,str = self.i,self.str
  local sz = #str
  if i > sz then return nil, "past end of file" end
  local res
  if fmt == '*l' or fmt == '*L' then
    -- line read: '*L' keeps the trailing newline, '*l' drops it
    local idx = str:find('\n',i) or (sz+1)
    res = str:sub(i,fmt == '*l' and idx-1 or idx)
    self.i = idx+1
  elseif fmt == '*a' then
    -- read the rest of the data
    res = str:sub(i)
    self.i = sz+1
  elseif fmt == '*n' then
    -- number read: integer part, optional fraction, optional exponent
    -- NOTE(review): `i2` is declared twice below (inherited from the
    -- Penlight original); harmless but confusing.
    local _,i2,i2,idx
    _,idx = str:find ('%s*%d+',i)
    _,i2 = str:find ('^%.%d+',idx+1)
    if i2 then idx = i2 end
    _,i2 = str:find ('^[eE][%+%-]*%d+',idx+1)
    if i2 then idx = i2 end
    local val = str:sub(i,idx)
    res = tonumber(val)
    self.i = idx+1
  elseif type(fmt) == 'number' then
    -- fixed-size read of `fmt` bytes
    res = str:sub(i,i+fmt-1)
    self.i = i + fmt
  else
    error("bad read format",2)
  end
  return res
end

-- Reads one item per given format, like io.file:read; with no
-- arguments reads a single line ('*l').
function SR:read(...)
  if select('#',...) == 0 then
    return self:_read('*l')
  else
    local res, fmts = {},{...}
    for i = 1, #fmts do
      res[i] = self:_read(fmts[i])
    end
    return unpack(res)
  end
end

-- Repositions the read cursor like io.file:seek ('set'/'cur'/'end',
-- default 'cur', 0) and returns the new position.
-- NOTE(review): for whence == 'end' the base is #self.str (the last
-- byte), not one past it -- confirm this matches callers' expectations.
function SR:seek(whence,offset)
  local base
  whence = whence or 'cur'
  offset = offset or 0
  if whence == 'set' then
    base = 1
  elseif whence == 'cur' then
    base = self.i
  elseif whence == 'end' then
    base = #self.str
  end
  self.i = base + offset
  return self.i
end

-- No-op close, present so SR can stand in for a real file handle.
function SR:close() -- for compatibility only
end
--- Create a file-like object (an SR instance) for reading from a
-- given string, starting at position 1.
-- @param s The input string.
function reader(s)
  local obj = { str = s, i = 1 }
  return setmetatable(obj, SR)
end
-- decompress the (zlib-deflated) file at `path`, reading it in
-- BUF_SIZE chunks, and return a reader() over the inflated content
function decompressed(path)
  local fi = assert(io.open(path, 'rb'))
  local result = {}
  local z = inflate()
  repeat
    local str = fi:read(BUF_SIZE)
    local data = z(str)
    if type(data) == 'string' then
      result[#result+1] = data
    -- NOTE(review): a non-string result is only dumped to stdout and
    -- otherwise ignored; this looks like leftover debugging rather
    -- than real error handling -- confirm what inflate() returns here.
    else print('!!!', data) end
  until not str
  fi:close()
  return reader(table.concat(result))
end
-- Reads single bytes from `f` up to (and consuming) the first \0 byte;
-- returns the bytes read before the \0, or nil when none were read.
function read_until_nul(f)
  local chunks = {}
  while true do
    local c = f:read(1)
    if not c or c == '\0' then break end
    chunks[#chunks+1] = c
  end
  if #chunks == 0 then
    return nil
  end
  return table.concat(chunks)
end
-- Converts a binary string to its lowercase hexadecimal representation.
function to_hex(s)
  return (s:gsub('.', function(c)
    return ('%02x'):format(c:byte())
  end))
end

-- Converts a hexadecimal string back to its binary representation.
function from_hex(s)
  return (s:gsub('..', function(pair)
    return string.char(tonumber(pair, 16))
  end))
end

-- Normalizes a SHA-1 value to its readable 40-character hex form.
function readable_sha(s)
  if #s == 40 then return s end
  return to_hex(s)
end

-- Normalizes a SHA-1 value to its binary 20-byte form.
function binary_sha(s)
  if #s == 20 then return s end
  return from_hex(s)
end

-- Hashes git object `data` of the given `type` ('blob', 'tree', ...)
-- and byte length `len`, including the standard "<type> <len>\0" header.
function object_sha(data, len, type)
  local header = type .. ' ' .. len .. '\0'
  return sha(header .. data)
end
-- Compresses `data` with zlib deflate, returning whatever the
-- compressor stream returns for a "finish" call.
-- Fix: the original `function deflate(data)` assigned to the
-- file-local `local deflate = core.deflate` declared near the top of
-- this file, so the body's `deflate()` call invoked this very function
-- again (infinite recursion) and the module table never received a
-- `deflate` field. Assigning to `_M.deflate` (the module table created
-- by module(..., package.seeall)) and calling core.deflate() directly
-- fixes both problems while leaving the original local untouched.
function _M.deflate(data)
  local compressor = core.deflate()
  return compressor(data, "finish")
end

View File

@ -0,0 +1,298 @@
-----------------------------------------------------------------------------
-- LTN12 - Filters, sources, sinks and pumps.
-- LuaSocket toolkit.
-- Author: Diego Nehab
-----------------------------------------------------------------------------
-----------------------------------------------------------------------------
-- Declare module
-----------------------------------------------------------------------------
local string = require("string")
local table = require("table")
local base = _G
local _M = {}
if module then -- heuristic for exporting a global package table
ltn12 = _M
end
local filter,source,sink,pump = {},{},{},{}
_M.filter = filter
_M.source = source
_M.sink = sink
_M.pump = pump
-- 2048 seems to be better in windows...
_M.BLOCKSIZE = 2048
_M._VERSION = "LTN12 1.0.3"
-----------------------------------------------------------------------------
-- Filter stuff
-----------------------------------------------------------------------------
-- Returns a high-level filter that repeatedly invokes the low-level
-- filter `low`, threading its context `ctx` (and constant `extra`)
-- through successive calls.
function filter.cycle(low, ctx, extra)
  base.assert(low)
  return function(chunk)
    local filtered
    filtered, ctx = low(ctx, chunk, extra)
    return filtered
  end
end
-- chains a bunch of filters together into a single filter equivalent
-- to applying each given filter in sequence
-- (thanks to Wim Couwenberg)
function filter.chain(...)
  local arg = {...}
  local n = select('#',...)
  -- `index` is the filter currently being driven; `top` is the first
  -- filter that still has input left to process
  local top, index = 1, 1
  local retry = ""
  return function(chunk)
    -- remember whether the caller is flushing (chunk == nil)
    retry = chunk and retry
    while true do
      if index == top then
        chunk = arg[index](chunk)
        if chunk == "" or top == n then return chunk
        elseif chunk then index = index + 1
        else
          -- this filter is exhausted; start flushing the next one
          top = top+1
          index = top
        end
      else
        chunk = arg[index](chunk or "")
        if chunk == "" then
          -- downstream filter wants more input; back up one stage
          index = index - 1
          chunk = retry
        elseif chunk then
          if index == n then return chunk
          else index = index + 1 end
        else base.error("filter returned inappropriate nil") end
      end
    end
  end
end
-----------------------------------------------------------------------------
-- Source stuff
-----------------------------------------------------------------------------
-- shared no-op source: always signals end of data
local function empty()
  return nil
end

-- returns a source that immediately reports end of data
function source.empty()
  return empty
end

-- returns a source that always fails with the error value `err`
function source.error(err)
  local function failing()
    return nil, err
  end
  return failing
end
-- Creates a source reading BLOCKSIZE chunks from open file `handle`;
-- the handle is closed when end of file is reached. With a nil handle,
-- returns an error source carrying `io_err` (or a default message).
function source.file(handle, io_err)
  if not handle then
    return source.error(io_err or "unable to open file")
  end
  return function()
    local chunk = handle:read(_M.BLOCKSIZE)
    if not chunk then handle:close() end
    return chunk
  end
end
-- turns a fancy source (one that may return a replacement source as
-- its second result) into a simple source
function source.simplify(src)
  base.assert(src)
  return function()
    local chunk, err_or_new = src()
    -- a fancy source may hand back a new source to be used from now on
    src = err_or_new or src
    if not chunk then return nil, err_or_new
    else return chunk end
  end
end
-- Creates a source producing string `s` in BLOCKSIZE slices; a nil
-- argument yields the empty source.
function source.string(s)
  if not s then return source.empty() end
  local pos = 1
  return function()
    local chunk = string.sub(s, pos, pos + _M.BLOCKSIZE - 1)
    pos = pos + _M.BLOCKSIZE
    if chunk == "" then return nil end
    return chunk
  end
end
-- Wraps `src` in a rewindable source: calling the result with a chunk
-- pushes it back, and pushed-back chunks are replayed (LIFO) before
-- the underlying source is consulted again.
function source.rewind(src)
  base.assert(src)
  local pushed = {}
  return function(chunk)
    if chunk then
      table.insert(pushed, chunk)
    else
      local replay = table.remove(pushed)
      if replay then return replay end
      return src()
    end
  end
end
-- chains a source with a filter: chunks pulled from `src` are passed
-- through `f` before being produced by the resulting source
function source.chain(src, f)
  base.assert(src and f)
  local last_in, last_out = "", ""
  -- "feeding": pull a chunk from src and push it through the filter;
  -- "eating": keep flushing the filter until it asks for more input
  local state = "feeding"
  local err
  return function()
    -- last_out == nil means the filter already signalled end of data
    if not last_out then
      base.error('source is empty!', 2)
    end
    while true do
      if state == "feeding" then
        last_in, err = src()
        if err then return nil, err end
        last_out = f(last_in)
        if not last_out then
          if last_in then
            base.error('filter returned inappropriate nil')
          else
            return nil
          end
        elseif last_out ~= "" then
          state = "eating"
          if last_in then last_in = "" end
          return last_out
        end
      else
        last_out = f(last_in)
        if last_out == "" then
          if last_in == "" then
            state = "feeding"
          else
            base.error('filter returned ""')
          end
        elseif not last_out then
          if last_in then
            base.error('filter returned inappropriate nil')
          else
            return nil
          end
        else
          return last_out
        end
      end
    end
  end
end
-- Creates a source producing the contents of all given sources, one
-- after the other, as if they were concatenated.
-- (thanks to Wim Couwenberg)
function source.cat(...)
  local pending = {...}
  local current = table.remove(pending, 1)
  return function()
    while current do
      local chunk, err = current()
      if chunk then return chunk end
      if err then return nil, err end
      current = table.remove(pending, 1)
    end
  end
end
-----------------------------------------------------------------------------
-- Sink stuff
-----------------------------------------------------------------------------
-- Creates a sink appending received chunks to table `t` (created when
-- not given); returns the sink and the table as a second result.
function sink.table(t)
  t = t or {}
  local function store(chunk, err)
    if chunk then t[#t+1] = chunk end
    return 1
  end
  return store, t
end
-- turns a fancy sink (one that may return a replacement sink as its
-- second result) into a simple sink
function sink.simplify(snk)
  base.assert(snk)
  return function(chunk, err)
    local ret, err_or_new = snk(chunk, err)
    if not ret then return nil, err_or_new end
    -- a fancy sink may hand back a new sink to be used from now on
    snk = err_or_new or snk
    return 1
  end
end
-- Creates a sink writing chunks to open file `handle`; a nil chunk
-- (end of data) closes the handle. With a nil handle, returns an
-- error sink carrying `io_err` (or a default message).
function sink.file(handle, io_err)
  if not handle then
    return sink.error(io_err or "unable to open file")
  end
  return function(chunk, err)
    if chunk then
      return handle:write(chunk)
    end
    handle:close()
    return 1
  end
end
-- shared discarding sink: accepts and drops any chunk
local function null()
  return 1
end

-- returns a sink that discards all data it receives
function sink.null()
  return null
end

-- returns a sink that always fails with the error value `err`
function sink.error(err)
  return function()
    return nil, err
  end
end
-- chains a sink with a filter: chunks are passed through `f` before
-- being delivered to `snk`
function sink.chain(f, snk)
  base.assert(f and snk)
  return function(chunk, err)
    if chunk ~= "" then
      local filtered = f(chunk)
      -- when flushing (chunk == nil) keep feeding nil until the filter
      -- returns nil; otherwise stop once it returns ""
      local done = chunk and ""
      while true do
        local ret, snkerr = snk(filtered, err)
        if not ret then return nil, snkerr end
        if filtered == done then return 1 end
        filtered = f(done)
      end
    else return 1 end
  end
end
-----------------------------------------------------------------------------
-- Pump stuff
-----------------------------------------------------------------------------
-- Moves a single chunk from source `src` to sink `snk`; returns 1 on
-- success, or nil plus the source's or sink's error.
function pump.step(src, snk)
  local chunk, src_err = src()
  local ret, snk_err = snk(chunk, src_err)
  if chunk and ret then
    return 1
  end
  return nil, src_err or snk_err
end
-- Pumps all data from source `src` into sink `snk`, one `step` at a
-- time (defaulting to pump.step); returns 1 on success, or nil plus
-- the first error encountered.
function pump.all(src, snk, step)
  base.assert(src and snk)
  step = step or pump.step
  repeat
    local ret, err = step(src, snk)
    if not ret then
      if err then return nil, err end
      return 1
    end
  until false
end
return _M

View File

@ -0,0 +1,212 @@
--[[
lua_lexer_loose.lua.
Loose lexing of Lua code. See README.
WARNING: This code is preliminary and may have errors
in its current form.
(c) 2013 David Manura. MIT License.
--]]
local M = {}
-- based on LuaBalanced
-- Matches a Lua string literal (quoted or long-bracket form) starting
-- at `pos`; returns the literal text and the position just after it.
-- Unterminated literals return the remainder of `s` and #s + 1.
-- Returns nil, pos when no string starts here.
local function match_string(s, pos)
  pos = pos or 1
  local posa = pos
  local c = s:sub(pos,pos)
  if c == '"' or c == "'" then
    -- quoted string: scan for the closing quote, skipping \-escapes
    pos = pos + 1
    while 1 do
      pos = s:find("[" .. c .. "\\]", pos)
      if not pos then return s:sub(posa), #s + 1 end -- not terminated string
      if s:sub(pos,pos) == c then
        local part = s:sub(posa, pos)
        return part, pos + 1
      else
        pos = pos + 2 -- skip the backslash and the escaped character
      end
    end
  else
    -- long-bracket string: [[ ... ]] with optional '=' padding
    local sc = s:match("^%[(=*)%[", pos)
    if sc then
      local _; _, pos = s:find("%]" .. sc .. "%]", pos)
      if not pos then return s:sub(posa), #s + 1 end -- not terminated string
      local part = s:sub(posa, pos)
      return part, pos + 1
    else
      return nil, pos
    end
  end
end
-- based on LuaBalanced
-- Matches a short or long Lua comment starting at `pos`; returns the
-- comment text (including the leading '--') and the position after it,
-- or nil plus `pos` when no comment starts here.
local function match_comment(s, pos)
  pos = pos or 1
  if s:sub(pos, pos+1) ~= '--' then
    return nil, pos
  end
  pos = pos + 2
  -- long comment form: --[[ ... ]] (with optional '=' padding)
  if s:sub(pos, pos) == '[' then
    local body, after = match_string(s, pos)
    if body then
      return '--' .. body, after
    end
  end
  -- short comment: up to and including the end of the line
  local body, after = s:match('^([^\n]*\n?)()', pos)
  return '--' .. body, after
end
-- note: matches invalid numbers too (for example, 0x)
-- Matches a number-like token at `pos`: optional 0x/0X prefix, either
-- a LuaJIT-style LL/ULL long integer or a mantissa with optional
-- exponent and imaginary 'i'/'I' suffix. Returns the matched text,
-- or nil when nothing number-like starts here.
local function match_numberlike(s, pos)
  local hex = s:match('^0[xX]', pos)
  if hex then pos = pos + #hex end
  -- pattern pieces; hex digits are used when a 0x prefix was seen
  local longint = (hex and '^%x+' or '^%d+') .. '[uU]?[lL][lL]'
  local mantissa1 = hex and '^%x+%.?%x*' or '^%d+%.?%d*'
  local mantissa2 = hex and '^%.%x+' or '^%.%d+'
  local exponent = hex and '^[pP][+%-]?%x*' or '^[eE][+%-]?%d*'
  local imaginary = '^[iI]'
  local tok = s:match(longint, pos)
  if not tok then
    tok = s:match(mantissa1, pos) or s:match(mantissa2, pos)
    if tok then
      local tok2 = s:match(exponent, pos + #tok)
      if tok2 then tok = tok..tok2 end
      tok2 = s:match(imaginary, pos + #tok)
      if tok2 then tok = tok..tok2 end
    end
  end
  -- reattach the 0x prefix; a bare prefix ("0x") is returned as-is
  return tok and (hex or '') .. tok or hex
end
-- Builds a set (char -> true) from the characters of `s`.
local function newset(s)
  local t = {}
  for c in s:gmatch'.' do t[c] = true end
  return t
end
-- Builds a set (word -> true) from the whitespace-separated words of `s`.
local function qws(s)
  local t = {}
  for k in s:gmatch'%S+' do t[k] = true end
  return t
end
-- character classes and token tables used by the lexer
local sym = newset("abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ_") -- identifier start characters
local dig = newset('0123456789')
local name = "([_A-Za-z][_A-Za-z0-9]*)" -- identifier pattern
-- single characters that start an operator, plus explicit two-char operators
local op = newset('=~<>.+-*/%^#=<>;:,.{}[]()')
op['=='] = true
op['<='] = true
op['>='] = true
op['~='] = true
op['..'] = true
op['<<'] = true
op['>>'] = true
op['//'] = true
local is_keyword = qws[[
and break do else elseif end false for function if
in local nil not or repeat return
then true until while goto]]
-- Loosely lexes Lua source `code`, calling f(tag, token, pos) for each
-- token found, starting at byte offset `pos` (default 1). Tags emitted:
-- 'Shebang', 'Keyword' (keywords and operators), 'Id', 'Comment',
-- 'Label', 'String', 'Number', and 'Unknown' for unrecognized or
-- unterminated input.
function M.lex(code, f, pos)
  local pos = pos or 1
  local tok = code:match('^#![^\n]*\n', pos) -- shebang
  if tok then f('Shebang', tok, 1) pos = pos + #tok end
  while pos <= #code do
    -- skip whitespace; n1/n2/n3 = next char, next two chars, char after next
    local p2, n2, n1, n3 = code:match('^%s*()((%S)(%S?))', pos)
    if not p2 then assert(code:sub(pos):match('^%s*$')); break end
    pos = p2
    if sym[n1] then
      -- identifier or keyword
      local tok = code:match('^'..name, pos)
      assert(tok)
      if is_keyword[tok] then
        f('Keyword', tok, pos)
      else
        f('Id', tok, pos)
      end
      pos = pos + #tok
    elseif n2 == '--' then
      local tok, pos2 = match_comment(code, pos)
      assert(tok)
      f('Comment', tok, pos)
      pos = pos2
    elseif n2 == '::' then
      local tok = code:match('^(::%s*'..name..'%s*::)', pos)
      if tok then
        f('Label', tok, pos)
        pos = pos + #tok
      else
        f('Unknown', code:sub(pos, pos+1), pos) -- unterminated label
        pos = pos + 2
      end
    elseif n1 == '\'' or n1 == '\"' or n2 == '[[' or n2 == '[=' then
      local tok = match_string(code, pos)
      if tok then
        f('String', tok, pos)
        pos = pos + #tok
      else
        f('Unknown', code:sub(pos), pos) -- unterminated string
        pos = #code + 1
      end
    elseif dig[n1] or (n1 == '.' and dig[n3]) then
      local tok = match_numberlike(code, pos)
      assert(tok)
      f('Number', tok, pos)
      pos = pos + #tok
    elseif op[n2] then
      -- two-char operator; extend '..' to '...' when followed by '.'
      -- (fix: `tok` was accidentally assigned as a global here)
      local tok
      if n2 == '..' and code:match('^%.', pos+2) then
        tok = '...'
      else
        tok = n2
      end
      f('Keyword', tok, pos)
      pos = pos + #tok
    elseif op[n1] then
      local tok = n1
      f('Keyword', tok, pos)
      pos = pos + #tok
    else
      f('Unknown', n1, pos)
      pos = pos + 1
    end
  end
end
-- Stream: a one-token pushback buffer over a token generator `self.f`.
local Stream = {}
Stream.__index = Stream

-- Returns the next token, consuming a previously peeked token first.
function Stream:next(val)
  local buffered = self._next
  if buffered then
    self._next = nil
    return buffered
  end
  self._next = nil
  return self.f()
end

-- Returns the next token without consuming it.
function Stream:peek()
  local buffered = self._next
  if buffered then
    return buffered
  end
  buffered = self.f()
  self._next = buffered
  return buffered
end
-- Returns a Stream of tokens for `code` starting at `pos`; each token
-- is a table {tag=..., <text>, lineinfo=<offset>}, ending with an
-- {tag='Eof'} token. `f` may supply a custom coroutine body to run
-- instead of the default lexer.
function M.lexc(code, f, pos)
  local yield = coroutine.yield
  local func = coroutine.wrap(f or function()
    M.lex(code, function(tag, name, pos)
      -- skip Comment tags as they may arbitrarily split statements and affect their processing
      if tag ~= 'Comment' then yield {tag=tag, name, lineinfo=pos} end
    end, pos)
    yield {tag='Eof', lineinfo = #code+1}
  end)
  return setmetatable({f=func}, Stream)
end
return M

View File

@ -0,0 +1,337 @@
--[[
lua_parser_loose.lua.
Loose parsing of Lua code. See README.
(c) 2013 David Manura. MIT License.
--]]
local PARSE = {}
local unpack = table.unpack or unpack
local LEX = require 'lua_lexer_loose'
--[[
Loose parser.
lx - lexer stream of Lua tokens.
f(event...) - callback function to send events to.
Events generated:
'Var', name, lineinfo - variable declaration that immediately comes into scope.
'VarSelf', name, lineinfo - same as 'Var' but for implicit 'self' parameter
in method definitions. lineinfo is zero-width space after '('
'VarNext', name, lineinfo - variable definition that comes into scope
upon next statement.
'VarInside', name, lineinfo - variable definition that comes into scope
inside following block. Used for control variables in 'for' statements.
'Id', name, lineinfo - reference to variable.
'String', name - string or table field.
'Scope', opt - beginning of scope block.
'EndScope', nil, lineinfo - end of scope block.
'FunctionCall', name, lineinfo - function call (in addition to other events).
'Function', name, lineinfo - function definition.
--]]
-- Parses the token stream `lx` (see the event list in the comment
-- above), calling f(event, name, lineinfo, nobreak) for declarations,
-- references, scope boundaries and statement starts. `level` is the
-- initial nesting depth of the scope stack.
function PARSE.parse_scope(lx, f, level)
  local cprev = {tag='Eof'}
  -- stack of scopes.
  local scopes = {{}}
  for l = 2, (level or 1) do scopes[l] = {} end
  -- pushes a new scope and reports it
  local function scope_begin(opt, lineinfo, nobreak)
    scopes[#scopes+1] = {}
    f('Scope', opt, lineinfo, nobreak)
  end
  -- pops the current scope; reports whether any enclosing scope is
  -- still inside a 'local' statement
  local function scope_end(opt, lineinfo)
    local scope = #scopes
    if scope > 1 then table.remove(scopes) end
    local inside_local = false
    for scope = scope-1, 1, -1 do
      if scopes[scope].inside_local then inside_local = true; break end
    end
    f('EndScope', opt, lineinfo, inside_local)
  end
  -- parses "(p1, p2, ...)" after 'function'; emits 'Function' plus
  -- 'Var'/'VarSelf' events for the parameters
  local function parse_function_list(has_self, name, pos)
    local c = lx:next(); assert(c[1] == '(')
    f('Statement', c[1], c.lineinfo, true) -- generate Statement for function definition
    scope_begin(c[1], c.lineinfo, true)
    local vars = {} -- accumulate vars (if any) to send after 'Function'
    if has_self then
      local lineinfo = c.lineinfo+1 -- zero size
      table.insert(vars, {'VarSelf', 'self', lineinfo, true})
    end
    while true do
      local n = lx:peek()
      if not (n.tag == 'Id' or n.tag == 'Keyword' and n[1] == '...') then break end
      local c = lx:next()
      if c.tag == 'Id' then table.insert(vars, {'Var', c[1], c.lineinfo, true}) end
      -- ignore '...' in this case
      if lx:peek()[1] == ',' then lx:next() end
    end
    if lx:peek()[1] == ')' then
      lx:next()
      f('Function', name, pos or c.lineinfo, true)
    end
    for _, var in ipairs(vars) do f(unpack(var)) end
  end
  while true do
    local c = lx:next()
    -- Detect end of previous statement: a statement-starting keyword,
    -- or an Id directly following a statement-ending token
    if c.tag == 'Eof' -- trigger 'Statement' at the end of file
    or c.tag == 'Keyword' and (
      c[1] == 'break' or c[1] == 'goto' or c[1] == 'do' or c[1] == 'while' or
      c[1] == 'repeat' or c[1] == 'if' or c[1] == 'for' or c[1] == 'function' and lx:peek().tag == 'Id' or
      c[1] == 'local' or c[1] == ';' or c[1] == 'until' or c[1] == 'return' or c[1] == 'end') or
    c.tag == 'Id' and
      (cprev.tag == 'Id' or
       cprev.tag == 'Keyword' and
       (cprev[1] == ']' or cprev[1] == ')' or cprev[1] == '}' or
        cprev[1] == '...' or cprev[1] == 'end' or
        cprev[1] == 'true' or cprev[1] == 'false' or
        cprev[1] == 'nil') or
       cprev.tag == 'Number' or cprev.tag == 'String')
    then
      -- a repeat-until scope only closes at the following statement,
      -- since the until-condition can still see the body's locals
      if scopes[#scopes].inside_until then scope_end(nil, c.lineinfo) end
      local scope = #scopes
      if not scopes[scope].inside_table then scopes[scope].inside_local = nil end
      f('Statement', c[1], c.lineinfo,
        scopes[scope].inside_local or c[1] == 'local' or c[1] == 'function' or c[1] == 'end')
    end
    if c.tag == 'Eof' then break end
    -- Process token(s)
    if c.tag == 'Keyword' then
      if c[1] == 'local' and lx:peek().tag == 'Keyword' and lx:peek()[1] == 'function' then
        -- local function
        local c = lx:next(); assert(c[1] == 'function')
        if lx:peek().tag == 'Id' then
          c = lx:next()
          f('Var', c[1], c.lineinfo, true)
          if lx:peek()[1] == '(' then parse_function_list(nil, c[1], c.lineinfo) end
        end
      elseif c[1] == 'function' then
        if lx:peek()[1] == '(' then -- inline function
          parse_function_list()
        elseif lx:peek().tag == 'Id' then -- function definition statement
          c = lx:next(); assert(c.tag == 'Id')
          local name = c[1]
          local pos = c.lineinfo
          f('Id', name, pos, true)
          local has_self
          -- accumulate dotted/colon name parts, e.g. a.b:c
          while lx:peek()[1] ~= '(' and lx:peek().tag ~= 'Eof' do
            c = lx:next()
            name = name .. c[1]
            if c.tag == 'Id' then
              f('String', c[1], c.lineinfo, true)
            elseif c.tag == 'Keyword' and c[1] == ':' then
              has_self = true
            end
          end
          if lx:peek()[1] == '(' then parse_function_list(has_self, name, pos) end
        end
      elseif c[1] == 'local' and lx:peek().tag == 'Id' then
        scopes[#scopes].inside_local = true
        c = lx:next()
        f('VarNext', c[1], c.lineinfo, true)
        while lx:peek().tag == 'Keyword' and lx:peek()[1] == ',' do
          c = lx:next(); if lx:peek().tag ~= 'Id' then break end
          c = lx:next()
          f('VarNext', c[1], c.lineinfo, true)
        end
      elseif c[1] == 'for' and lx:peek().tag == 'Id' then
        c = lx:next()
        f('VarInside', c[1], c.lineinfo, true)
        while lx:peek().tag == 'Keyword' and lx:peek()[1] == ',' do
          c = lx:next(); if lx:peek().tag ~= 'Id' then break end
          c = lx:next()
          f('VarInside', c[1], c.lineinfo, true)
        end
      elseif c[1] == 'goto' and lx:peek().tag == 'Id' then
        lx:next() -- the goto target is a label, not a variable reference
      elseif c[1] == 'do' then
        scope_begin('do', c.lineinfo)
        -- note: do/while/for statement scopes all begin at 'do'.
      elseif c[1] == 'repeat' or c[1] == 'then' then
        scope_begin(c[1], c.lineinfo)
      elseif c[1] == 'end' or c[1] == 'elseif' then
        scope_end(c[1], c.lineinfo)
      elseif c[1] == 'else' then
        scope_end(nil, c.lineinfo)
        scope_begin(c[1], c.lineinfo)
      elseif c[1] == 'until' then
        -- defer the scope_end; see the inside_until check above
        scopes[#scopes].inside_until = true
      elseif c[1] == '{' then
        -- track table-constructor nesting depth within this scope
        scopes[#scopes].inside_table = (scopes[#scopes].inside_table or 0) + 1
      elseif c[1] == '}' then
        local newval = (scopes[#scopes].inside_table or 0) - 1
        newval = newval >= 1 and newval or nil
        scopes[#scopes].inside_table = newval
      end
    elseif c.tag == 'Id' then
      local scope = #scopes
      local inside_local = scopes[scope].inside_local ~= nil
      local inside_table = scopes[scope].inside_table
      local cnext = lx:peek()
      if cnext.tag == 'Keyword' and (cnext[1] == '(' or cnext[1] == '{')
      or cnext.tag == 'String' then
        f('FunctionCall', c[1], c.lineinfo, inside_local)
      end
      -- either this is inside a table or it continues from a comma,
      -- which may be a field assignment, so assume it's in a table
      if (inside_table or cprev[1] == ',') and cnext.tag == 'Keyword' and cnext[1] == '=' then
        -- table field; table fields are tricky to handle during incremental
        -- processing as "a = 1" may be either an assignment (in which case
        -- 'a' is Id) or a field initialization (in which case it's a String).
        -- Since it's not possible to decide between two cases in isolation,
        -- this is not a good place to insert a break; instead, the break is
        -- inserted at the location of the previous keyword, which allows
        -- to properly handle those cases. The desired location of
        -- the restart point is returned as the `nobreak` value.
        f('String', c[1], c.lineinfo,
          inside_local or cprev and cprev.tag == 'Keyword' and cprev.lineinfo)
      elseif cprev.tag == 'Keyword' and (cprev[1] == ':' or cprev[1] == '.') then
        f('String', c[1], c.lineinfo, true)
      else
        f('Id', c[1], c.lineinfo, true)
        -- this looks like the left side of (multi-variable) assignment
        -- unless it's a part of `= var, field = value`, so skip if inside a table
        if not inside_table and not (cprev and cprev.tag == 'Keyword' and cprev[1] == '=') then
          while lx:peek().tag == 'Keyword' and lx:peek()[1] == ',' do
            local c = lx:next(); if lx:peek().tag ~= 'Id' then break end
            c = lx:next()
            f('Id', c[1], c.lineinfo, true)
          end
        end
      end
    end
    if c.tag ~= 'Comment' then cprev = c end
  end
end
--[[
This is similar to parse_scope but determines if variables are local or global.
lx - lexer stream of Lua tokens.
f(event...) - callback function to send events to.
Events generated:
'Id', name, lineinfo, 'local'|'global'
(plus all events in parse_scope)
--]]
-- Like parse_scope, but also maintains a chain of scope tables so the
-- callback's 4th argument (`vars`) can resolve names to local/global;
-- see the comment above for the extra 'Id' event semantics.
function PARSE.parse_scope_resolve(lx, f, vars)
  local NEXT = {} -- unique key
  local INSIDE = {} -- unique key
  -- creates a child scope table inheriting from `vars` via __index
  local function newscope(vars, opt, lineinfo)
    -- a 'do' scope adopts the vars queued for the inside of the block
    local newvars = opt=='do' and vars[INSIDE] or {}
    if newvars == vars[INSIDE] then vars[INSIDE] = false end
    newvars[INSIDE]=false
    newvars[NEXT]=false
    local level = (vars[0] or 0) + 1
    newvars[0] = level -- keep the current level
    newvars[-1] = lineinfo -- keep the start of the scope
    newvars[level] = newvars -- reference the current vars table
    return setmetatable(newvars, {__index=vars})
  end
  vars = vars or newscope({[0] = 0}, nil, 1)
  vars[NEXT] = false -- vars that come into scope upon next statement
  vars[INSIDE] = false -- vars that come into scope upon entering block
  PARSE.parse_scope(lx, function(op, name, lineinfo, nobreak)
    -- in some (rare) cases VarNext can follow Statement event (which copies
    -- vars[NEXT]). This may cause vars[0] to be `nil`, so default to 1.
    local var = op:find("^Var") and
      {fpos = lineinfo, at = (vars[0] or 1) + (op == 'VarInside' and 1 or 0),
       masked = vars[name], self = (op == 'VarSelf') or nil } or nil
    if op == 'Var' or op == 'VarSelf' then
      vars[name] = var
    elseif op == 'VarNext' then
      vars[NEXT] = vars[NEXT] or {}
      vars[NEXT][name] = var
    elseif op == 'VarInside' then
      vars[INSIDE] = vars[INSIDE] or {}
      vars[INSIDE][name] = var
    elseif op == 'Scope' then
      vars = newscope(vars, name, lineinfo)
    elseif op == 'EndScope' then
      -- restore the parent scope from the metatable chain
      local mt = getmetatable(vars)
      if mt ~= nil then vars = mt.__index end
    elseif op == 'Id'
    or op == 'String' or op == 'FunctionCall' or op == 'Function' then
      -- Just make callback
    elseif op == 'Statement' then -- beginning of statement
      -- Apply vars that come into scope upon beginning of statement.
      if vars[NEXT] then
        for k,v in pairs(vars[NEXT]) do
          vars[k] = v; vars[NEXT][k] = nil
        end
      end
    else
      assert(false)
    end
    f(op, name, lineinfo, vars, nobreak)
  end, vars[0])
end
-- Walks `code` and reports variables to f(op, name, other, lineinfo):
-- 'Id' for references (`other` is the value parse_scope_resolve passes
-- as its 4th callback argument) and 'Var' for definitions (reported
-- with the string "local"). 'VarSelf' and other events are ignored.
function PARSE.extract_vars(code, f)
  local lx = LEX.lexc(code)
  local char0 = 1 -- next char offset to write
  -- NOTE(review): `gen`/`char0` appear vestigial -- char0 is updated
  -- but never read; presumably left over from a code-emitting variant.
  local function gen(char1, nextchar0)
    char0 = nextchar0
  end
  PARSE.parse_scope_resolve(lx, function(op, name, lineinfo, other)
    if op == 'Id' then
      f('Id', name, other, lineinfo)
    elseif op == 'Var' or op == 'VarNext' or op == 'VarInside' then
      gen(lineinfo, lineinfo+#name)
      f('Var', name, "local", lineinfo)
    end -- ignore 'VarSelf' and others
  end)
  gen(#code+1, nil)
end
--[[
Converts 5.2 code to 5.1 style code with explicit _ENV variables.
Example: "function f(_ENV, x) print(x, y)" -->
"function _ENV.f(_ENV, x) _ENV.print(x, _ENV.y) end"
code - string of Lua code. Assumed to be valid Lua (FIX: 5.1 or 5.2?)
f(s) - callback receiving chunks of output Lua code (e.g. io.stdout);
when omitted, the converted code is returned as a single string.
--]]
function PARSE.replace_env(code, f)
  if not f then return PARSE.accumulate(PARSE.replace_env, code) end
  PARSE.extract_vars(code, function(op, name, other)
    if op == 'Id' then
      -- qualify global references with _ENV
      if other == 'global' then
        f('_ENV.' .. name)
      else
        f(name)
      end
    elseif op == 'Var' or op == 'Other' then
      f(name)
    end
  end)
end
-- Helper: a callable accumulator object. Pass it as argument `f` to
-- functions like `replace_env` above, then call :result() to obtain
-- the collected fragments as a single string.
function PARSE.accumulator()
  local pieces = {}
  local mt = {}
  mt.__index = mt
  function mt:__call(s) pieces[#pieces+1] = s end
  function mt:result() return table.concat(pieces) end
  return setmetatable({}, mt)
end
-- Helper: runs g(code, acc) with a fresh accumulator and returns the
-- accumulated output string.
function PARSE.accumulate(g, code)
  local acc = PARSE.accumulator()
  g(code, acc)
  return acc:result()
end
return PARSE

View File

@ -0,0 +1,746 @@
#!/usr/bin/env lua
-- Command line interface to LuaDist-git.
local dist = require "dist"
local utils = require "dist.utils"
local depends = require "dist.depends"
local package = require "dist.package"
local mf = require "dist.manifest"
local cfg = require "dist.config"
local sys = require "dist.sys"
-- CLI commands of Luadist.
local commands
commands = {
-- Print help for this command line interface.
["help"] = {
help = [[
Usage: luadist [DEPLOYMENT_DIRECTORY] <COMMAND> [ARGUMENTS...] [-VARIABLES...]
Commands:
help - print this help
install - install modules
remove - remove modules
refresh - update information about modules in repositories
list - list installed modules
info - show information about modules
search - search repositories for modules
fetch - download modules
make - manually deploy modules from local paths
upload - upload installed modules to their repositories
tree - print dependency tree of a module
selftest - run the selftest of LuaDist
To get help on specific command, run:
luadist help <COMMAND>
]],
-- Prints the banner plus either general help or the help text of the
-- command named in help_item[1]; returns 0.
run = function (deploy_dir, help_item)
  deploy_dir = deploy_dir or dist.get_deploy_dir()
  help_item = help_item or {}
  assert(type(deploy_dir) == "string", "luadist.help: Argument 'deploy_dir' is not a string.")
  assert(type(help_item) == "table", "luadist.help: Argument 'help_item' is not a table.")
  deploy_dir = sys.abs_path(deploy_dir)
  -- fall back to the general help when no (or an unknown) topic was given
  local topic = help_item[1]
  if not (topic and commands[topic]) then
    topic = "help"
  end
  print_info()
  print(commands[topic].help)
  return 0
end
},
-- Install modules.
["install"] = {
help = [[
Usage: luadist [DEPLOYMENT_DIRECTORY] install MODULES... [-VARIABLES...]
The 'install' command will install specified MODULES to
DEPLOYMENT_DIRECTORY. LuaDist will also automatically resolve, download
and install all dependencies.
If DEPLOYMENT_DIRECTORY is not specified, the deployment directory
of LuaDist is used.
You can use * (an asterisk sign) in the name of the module as a wildcard
with the meaning 'any symbols' (in most shells, the module name then must
be quoted to prevent the expansion of asterisk by the shell itself).
Optional CMake VARIABLES in -D format (e.g. -Dvariable=value) or LuaDist
configuration VARIABLES (e.g. -variable=value) can be specified.
The -simulate configuration option makes LuaDist only to simulate the
installation of modules (no modules will be really installed).
]],
-- Installs the given modules (plus dependencies) into deploy_dir.
-- Exits the process with status 1 on failure; returns 0 otherwise.
run = function (deploy_dir, modules, cmake_variables)
  deploy_dir = deploy_dir or dist.get_deploy_dir()
  if type(modules) == "string" then modules = {modules} end
  cmake_variables = cmake_variables or {}
  assert(type(deploy_dir) == "string", "luadist.install: Argument 'deploy_dir' is not a string.")
  assert(type(modules) == "table", "luadist.install: Argument 'modules' is not a string or table.")
  assert(type(cmake_variables) == "table", "luadist.install: Argument 'cmake_variables' is not a table.")
  deploy_dir = sys.abs_path(deploy_dir)
  if cfg.simulate then
    print("NOTE: this is just simulation.")
  end
  if #modules == 0 then
    print("No modules to install specified.")
    return 0
  end
  local succeeded, err = dist.install(modules, deploy_dir, cmake_variables)
  if not succeeded then
    print(err)
    os.exit(1)
  end
  print((cfg.simulate and "Simulated installation" or "Installation") .. " successful.")
  return 0
end
},
-- Remove modules.
["remove"] = {
help = [[
Usage: luadist [DEPLOYMENT_DIRECTORY] remove MODULES... [-VARIABLES...]
The 'remove' command will remove specified MODULES from
DEPLOYMENT_DIRECTORY. If no module is specified, all modules
will be removed.
If DEPLOYMENT_DIRECTORY is not specified, the deployment directory
of LuaDist is used. If no MODULES are specified, all installed modules
will be removed.
You can use * (an asterisk sign) in the name of the module as a wildcard
with the meaning 'any symbols' (in most shells, the module name then must
be quoted to prevent the expansion of asterisk by the shell itself).
Optional LuaDist configuration VARIABLES (e.g. -variable=value) can be
specified.
WARNING: dependencies between modules are NOT taken into account when
removing modules!
]],
-- Removes the given modules from deploy_dir (dependencies are NOT checked).
-- Exits the process with status 1 on failure; returns 0 otherwise.
run = function (deploy_dir, modules)
  deploy_dir = deploy_dir or dist.get_deploy_dir()
  if type(modules) == "string" then modules = {modules} end
  assert(type(deploy_dir) == "string", "luadist.remove: Argument 'deploy_dir' is not a string.")
  assert(type(modules) == "table", "luadist.remove: Argument 'modules' is not a string or table.")
  deploy_dir = sys.abs_path(deploy_dir)
  local removed_count, err = dist.remove(modules, deploy_dir)
  if not removed_count then
    print(err)
    os.exit(1)
  end
  print("Removed modules: " .. removed_count)
  return 0
end
},
-- Update repositories.
["refresh"] = {
help = [[
Usage: luadist [DEPLOYMENT_DIRECTORY] refresh [-VARIABLES...]
The 'refresh' command will update information about modules in all software
repositories of specified DEPLOYMENT_DIRECTORY. Also, the cached dependency
manifest, built from previous installations or invocations of 'tree'
functionality will be deleted.
If DEPLOYMENT_DIRECTORY is not specified, the deployment directory
of LuaDist is used.
Optional LuaDist configuration VARIABLES (e.g. -variable=value) can be
specified.
]],
-- Updates repository information for deploy_dir and deletes the cached
-- dependency manifest. Exits with status 1 on failure; returns 0 otherwise.
run = function (deploy_dir)
  deploy_dir = deploy_dir or dist.get_deploy_dir()
  assert(type(deploy_dir) == "string", "luadist.refresh: Argument 'deploy_dir' is not a string.")
  deploy_dir = sys.abs_path(deploy_dir)
  -- TODO: should be deleting the dep_manifest decoupled from refreshing the repository info?
  -- delete cached dependency manifest
  local dep_manifest_file = sys.abs_path(sys.make_path(deploy_dir, cfg.dep_cache_file))
  local dep_mf_deleted = false
  if sys.exists(dep_manifest_file) then
    sys.delete(dep_manifest_file)
    dep_mf_deleted = true
  end
  -- refresh repository information
  local ok, err = dist.update_manifest(deploy_dir)
  if not ok then
    print(err)
    os.exit(1)
  else
    -- FIX: corrected typo "successfuly" -> "successfully" in the user message
    print("Repositories successfully updated" .. (dep_mf_deleted and " and dependency cache deleted" or "") .. ".")
    return 0
  end
end
},
-- Manually deploy modules.
["make"] = {
help = [[
Usage: luadist [DEPLOYMENT_DIRECTORY] make MODULE_PATHS... [-VARIABLES...]
The 'make' command will manually deploy modules from specified local
MODULE_PATHS into the DEPLOYMENT_DIRECTORY.
The MODULE_PATHS will be preserved. If DEPLOYMENT_DIRECTORY is not
specified, the deployment directory of LuaDist is used.
Optional CMake VARIABLES in -D format (e.g. -Dvariable=value) or LuaDist
configuration VARIABLES (e.g. -variable=value) can be specified.
The -simulate configuration option makes LuaDist only to simulate the
deployment of modules (no modules will be really deployed).
WARNING: this command does NOT check whether the dependencies of deployed
modules are satisfied or not!
]],
-- Manually deploys modules from local module_paths into deploy_dir
-- (no dependency checking). Exits with status 1 on failure; returns 0.
run = function (deploy_dir, module_paths, cmake_variables)
  deploy_dir = deploy_dir or dist.get_deploy_dir()
  module_paths = module_paths or {}
  cmake_variables = cmake_variables or {}
  assert(type(deploy_dir) == "string", "luadist.make: Argument 'deploy_dir' is not a string.")
  assert(type(module_paths) == "table", "luadist.make: Argument 'module_paths' is not a table.")
  assert(type(cmake_variables) == "table", "luadist.make: Argument 'cmake_variables' is not a table.")
  deploy_dir = sys.abs_path(deploy_dir)
  if cfg.simulate then print("NOTE: this is just simulation.") end
  if #module_paths == 0 then
    print("No module paths to deploy specified.")
    return 0
  end
  local deployed_ok, err = dist.make(deploy_dir, module_paths, cmake_variables)
  if not deployed_ok then
    print(err)
    os.exit(1)
  end
  print((cfg.simulate and "Simulated deployment" or "Deployment") .. " successful.")
  return 0
end
},
-- Download modules.
["fetch"] = {
help = [[
Usage: luadist [FETCH_DIRECTORY] fetch MODULES... [-VARIABLES...]
The 'fetch' command will download specified MODULES to the FETCH_DIRECTORY.
If no FETCH_DIRECTORY is specified, the temporary directory of LuaDist
deployment directory (i.e. ']] .. cfg.temp_dir .. [[') is used.
If the version is not specified in module name, the most recent version
available will be downloaded.
Optional LuaDist configuration VARIABLES (e.g. -variable=value) can be
specified.
]],
-- Downloads the given modules into fetch_dir (defaults to the configured
-- temp dir under the deployment directory). Exits with status 1 on
-- failure; returns 0 otherwise.
run = function (fetch_dir, modules)
  fetch_dir = fetch_dir or dist.get_deploy_dir()
  modules = modules or {}
  assert(type(fetch_dir) == "string", "luadist.fetch: Argument 'fetch_dir' is not a string.")
  assert(type(modules) == "table", "luadist.fetch: Argument 'modules' is not a table.")
  fetch_dir = sys.abs_path(fetch_dir)
  -- if the default parameter (i.e. deploy_dir) is passed, use the default temp_dir
  if fetch_dir == dist.get_deploy_dir() then
    fetch_dir = sys.make_path(fetch_dir, cfg.temp_dir)
  end
  if #modules == 0 then
    print("No modules to download specified.")
    return 0
  end
  local ok, err = dist.fetch(modules, fetch_dir)
  if not ok then
    print(err)
    os.exit(1)
  else
    -- FIX: corrected typo "successfuly" -> "successfully" in the user message
    print("Modules successfully downloaded to '" .. fetch_dir .. "'.")
    return 0
  end
end
},
-- Upload modules.
["upload"] = {
help = [[
Usage: luadist [DEPLOYMENT_DIRECTORY] upload MODULES... [-VARIABLES...]
The 'upload' command will upload the binary versions of specified MODULES,
installed in the DEPLOYMENT_DIRECTORY, to their LuaDist repositories.
Base url of repositories is given by configuration variable 'upload_url'
(by default ']] .. cfg.upload_url .. [[') which you can change.
E.g.: Binary version of module 'lua', installed in DEPLOYMENT_DIRECTORY,
will now be uploaded to repository ']] .. cfg.upload_url .. [[lua.git'.
Organization of uploaded modules and their repositories is subject
to the conventions described in more detail in the source code
of the 'dist.upload_modules()' function (file 'dist/init.lua').
If DEPLOYMENT_DIRECTORY is not specified, the deployment directory
of LuaDist is used. If no MODULES are specified, all installed modules
will be uploaded.
You can use * (an asterisk sign) in the name of the module as a wildcard
with the meaning 'any symbols' (in most shells, the module name then must
be quoted to prevent the expansion of asterisk by the shell itself).
Optional LuaDist configuration VARIABLES (e.g. -variable=value) can be
specified.
]],
-- Uploads binary versions of installed modules to their repositories
-- (requires git). Exits with status 1 on failure; returns 0 otherwise.
run = function (deploy_dir, modules)
  -- check if we have git
  if not utils.system_dependency_available("git", "git --version") then
    os.exit(1)
  end
  deploy_dir = deploy_dir or dist.get_deploy_dir()
  if type(modules) == "string" then modules = {modules} end
  assert(type(deploy_dir) == "string", "luadist.upload: Argument 'deploy_dir' is not a string.")
  assert(type(modules) == "table", "luadist.upload: Argument 'modules' is not a string or table.")
  deploy_dir = sys.abs_path(deploy_dir)
  local uploaded_count, err = dist.upload_modules(deploy_dir, modules, cfg.upload_url)
  if not uploaded_count then
    print(err)
    os.exit(1)
  end
  print("Uploaded modules: " .. uploaded_count)
  return 0
end
},
-- List installed modules.
["list"] = {
help = [[
Usage: luadist [DEPLOYMENT_DIRECTORY] list [STRINGS...] [-VARIABLES...]
The 'list' command will list all modules installed in specified
DEPLOYMENT_DIRECTORY, which contain one or more optional STRINGS.
If DEPLOYMENT_DIRECTORY is not specified, the deployment directory
of LuaDist is used. If STRINGS are not specified, all installed modules
are listed.
Optional LuaDist configuration VARIABLES (e.g. -variable=value) can be
specified.
]],
-- Lists modules installed in deploy_dir, optionally filtered to those
-- whose names contain one of the given strings. Returns 0.
run = function (deploy_dir, strings)
  deploy_dir = deploy_dir or dist.get_deploy_dir()
  strings = strings or {}
  assert(type(deploy_dir) == "string", "luadist.list: Argument 'deploy_dir' is not a string.")
  assert(type(strings) == "table", "luadist.list: Argument 'strings' is not a table.")
  deploy_dir = sys.abs_path(deploy_dir)
  local installed = depends.filter_packages_by_strings(dist.get_deployed(deploy_dir), strings)
  print("\nInstalled modules:")
  print("==================\n")
  for _, pkg in pairs(installed) do
    local provided = pkg.provided_by and "\t [provided by " .. pkg.provided_by .. "]" or ""
    print(" " .. pkg.name .. "-" .. pkg.version .. "\t(" .. pkg.arch .. "-" .. pkg.type .. ")" .. provided)
  end
  print()
  return 0
end
},
-- Search for modules in repositories.
["search"] = {
help = [[
Usage: luadist [DEPLOYMENT_DIRECTORY] search [STRINGS...] [-VARIABLES...]
The 'search' command will list all modules from repositories, which contain
one or more STRINGS.
If no STRINGS are specified, all available modules are listed.
Optional LuaDist configuration VARIABLES (e.g. -variable=value) can be
specified.
]],
-- Searches the repository manifest for modules whose names contain one of
-- the given strings. Exits with status 1 if the manifest cannot be read.
run = function (deploy_dir, strings)
  deploy_dir = deploy_dir or dist.get_deploy_dir()
  strings = strings or {}
  assert(type(deploy_dir) == "string", "luadist.search: Argument 'deploy_dir' is not a string.")
  assert(type(strings) == "table", "luadist.search: Argument 'strings' is not a table.")
  deploy_dir = sys.abs_path(deploy_dir)
  local available, err = mf.get_manifest()
  if not available then
    print(err)
    os.exit(1)
  end
  available = depends.sort_by_names(depends.filter_packages_by_strings(available, strings))
  print("\nModules found:")
  print("==============\n")
  for _, pkg in pairs(available) do
    print(" " .. pkg.name)
  end
  print()
  return 0
end
},
-- Show information about modules.
["info"] = {
help = [[
Usage: luadist [DEPLOYMENT_DIRECTORY] info [MODULES...] [-VARIABLES...]
The 'info' command shows information about specified modules from
repositories. This command also shows whether modules are installed
in DEPLOYMENT_DIRECTORY.
If no MODULES are specified, all available modules are shown.
If DEPLOYMENT_DIRECTORY is not specified, the deployment directory
of LuaDist is used.
Optional LuaDist configuration VARIABLES (e.g. -variable=value) can be
specified.
]],
-- Shows information about modules: a brief listing of all available
-- modules when none are named, or detailed per-version info (including
-- installed state in deploy_dir) for explicitly named modules.
-- Exits with status 1 on manifest/retrieval failure; returns 0 otherwise.
run = function (deploy_dir, modules)
  deploy_dir = deploy_dir or dist.get_deploy_dir()
  modules = modules or {}
  assert(type(deploy_dir) == "string", "luadist.info: Argument 'deploy_dir' is not a string.")
  assert(type(modules) == "table", "luadist.info: Argument 'modules' is not a table.")
  deploy_dir = sys.abs_path(deploy_dir)
  local manifest, err = mf.get_manifest()
  if not manifest then
    print(err)
    os.exit(1)
  end
  -- if no packages specified explicitly, show just info from .gitmodules for all packages available
  if #modules == 0 then
    -- FIX: removed an unused 'local deployed = dist.get_deployed(deploy_dir)'
    -- whose result was never read in this branch.
    modules = depends.sort_by_names(manifest)
    print("")
    for _, pkg in pairs(modules) do
      print(" " .. pkg.name)
      print(" Repository url: " .. (pkg.path or "N/A"))
      print()
    end
    return 0
  -- if some packages explicitly specified, retrieve and show detailed info about them
  else
    if #modules > 5 then
      print("NOTE: More than 5 modules specified - operation may take a longer time.")
    end
    local deployed = dist.get_deployed(deploy_dir)
    for _, module in pairs(modules) do
      manifest, err = package.get_versions_info(module, manifest, deploy_dir, deployed)
      if not manifest then
        print(err)
        os.exit(1)
      end
    end
    modules = depends.sort_by_names(depends.find_packages(modules, manifest))
    print("")
    for _, pkg in pairs(modules) do
      print(" " .. pkg.name .. "-" .. pkg.version .. " (" .. pkg.arch .. "-" .. pkg.type ..")" .. (pkg.from_installed and " [info taken from installed version]" or ""))
      print(" Description: " .. (pkg.desc or "N/A"))
      print(" Author: " .. (pkg.author or "N/A"))
      print(" Homepage: " .. (pkg.url or "N/A"))
      print(" License: " .. (pkg.license or "N/A"))
      print(" Repository url: " .. (pkg.path or "N/A"))
      print(" Maintainer: " .. (pkg.maintainer or "N/A"))
      if pkg.provides then print(" Provides: " .. utils.table_tostring(pkg.provides)) end
      if pkg.depends then print(" Depends: " .. utils.table_tostring(pkg.depends)) end
      if pkg.conflicts then print(" Conflicts: " .. utils.table_tostring(pkg.conflicts)) end
      print(" State: " .. (depends.is_installed(pkg.name, deployed, pkg.version) and "installed" or "not installed"))
      print()
    end
    return 0
  end
end
},
-- Print dependency tree.
["tree"] = {
help = [[
Usage: luadist [DEPLOYMENT_DIRECTORY] tree [MODULES...] [-VARIABLES...]
The 'tree' command prints dependency tree for specified modules.
If no MODULES are specified, trees for all available modules are printed.
This information about modules is being cached in dependency manifest.
Optional LuaDist configuration VARIABLES (e.g. -variable=value) can be
specified.
]],
-- Prints the dependency tree for each given module (or for all available
-- modules when none are named). Exits with status 1 on failure; returns 0.
run = function (deploy_dir, modules)
  deploy_dir = deploy_dir or dist.get_deploy_dir()
  modules = modules or {}
  -- FIX: assert messages said "luadist.info" (copy-paste from the 'info'
  -- command); corrected to "luadist.tree".
  assert(type(deploy_dir) == "string", "luadist.tree: Argument 'deploy_dir' is not a string.")
  assert(type(modules) == "table", "luadist.tree: Argument 'modules' is not a table.")
  deploy_dir = sys.abs_path(deploy_dir)
  local manifest, err = mf.get_manifest()
  if not manifest then
    print(err)
    os.exit(1)
  end
  -- if no modules specified explicitly, assume all modules
  if #modules == 0 then modules = depends.sort_by_names(manifest) end
  print("Getting dependency information... (this may take a lot of time)")
  for _, module in pairs(modules) do
    -- if all modules are being queried, extract the name
    if type(module) == "table" then module = module.name end
    local dep_manifest, err = dist.dependency_info(module, deploy_dir)
    if not dep_manifest then
      print(err)
      os.exit(1)
    else
      -- print the dependency tree
      local heading = "Dependency tree for '" .. module .. "' (on " .. cfg.arch .. "-" .. cfg.type .. "):"
      print("\n" .. heading .. "")
      print(string.rep("=", #heading) .. "\n")
      for _, pkg in pairs(dep_manifest) do
        -- scm versions are shown as "scm" and fetched from HEAD
        local pkg_version, pkg_tag = pkg.version, pkg.version
        if pkg.was_scm_version then
          pkg_version, pkg_tag = "scm", "HEAD"
        end
        print(" " .. pkg.name .. "-" .. pkg_version .. " (" .. pkg.path .. ", " .. pkg_tag .. ")")
        if pkg.depends then
          for _, dep in pairs(pkg.depends) do
            if type(dep) ~= "table" then
              local found = depends.sort_by_versions(depends.find_packages(dep, dep_manifest))[1]
              if not found then
                print("Could not find the dependency '" .. dep .. "' in the dependency manifest.")
                os.exit(1)
              end
              print(" * " .. found.name .. "-" .. found.version .. " (" .. found.path .. ", " .. found.version .. ")")
            end
          end
        end
        print()
      end
    end
  end
  return 0
end
},
-- Selftest of LuaDist.
["selftest"] = {
help = [[
Usage: luadist [TEST_DIRECTORY] selftest [-VARIABLES...]
The 'selftest' command runs tests of LuaDist, located in TEST_DIRECTORY and
displays the results.
If no TEST_DIRECTORY is specified, the default test directory of LuaDist
deployment directory (i.e. ']] .. cfg.test_dir .. [[') is used.
Optional LuaDist configuration VARIABLES (e.g. -variable=value) can be
specified.
]],
-- Runs every test file found in test_dir (defaults to the configured test
-- directory under the deployment dir). Exits with status 1 if the test
-- directory cannot be read; returns 0 otherwise.
run = function (test_dir)
  test_dir = test_dir or dist.get_deploy_dir()
  assert(type(test_dir) == "string", "luadist.selftest: Argument 'deploy_dir' is not a string.")
  test_dir = sys.abs_path(test_dir)
  -- when called with the default deploy_dir, descend into the test subdir
  if test_dir == dist.get_deploy_dir() then
    test_dir = sys.make_path(test_dir, cfg.test_dir)
  end
  -- verify the directory is listable before announcing anything
  local iter, err = sys.get_directory(test_dir)
  if not iter then
    print("Running tests from '" .. test_dir .. "' failed: " .. err)
    os.exit(1)
  end
  -- run the tests
  print("\nRunning tests:")
  print("==============")
  for entry in sys.get_directory(test_dir) do
    local path = sys.make_path(test_dir, entry)
    if sys.is_file(path) then
      print()
      print(sys.extract_name(path) .. ":")
      dofile(path)
    end
  end
  print()
  return 0
end
},
}
-- Run the functionality of LuaDist 'command' in the 'deploy_dir' with other items
-- or settings/variables starting at 'other_idx' index of special variable 'arg'.
-- Runs the LuaDist 'command' against 'deploy_dir'. Remaining command-line
-- tokens (module names, -variable=value LuaDist settings, -Dvar=value
-- CMake definitions) are read from the global 'arg' table starting at
-- index 'other_idx' (nil means no extra tokens). Returns the command's
-- status (0 on success); may call os.exit(1) from within the command.
local function run_command(deploy_dir, command, other_idx)
deploy_dir = deploy_dir or dist.get_deploy_dir()
assert(type(deploy_dir) == "string", "luadist.run_command: Argument 'deploy_dir' is not a string.")
assert(type(command) == "string", "luadist.run_command: Argument 'command' is not a string.")
assert(not other_idx or type(other_idx) == "number", "luadist.run_command: Argument 'other_idx' is not a number.")
deploy_dir = sys.abs_path(deploy_dir)
local items = {}
local cmake_variables = {}
-- parse items after the command (and LuaDist or CMake variables)
-- Note: the branch order matters: -D... must be tested before the generic
-- -name=value and bare -name forms, which would also match it.
if other_idx then
for i = other_idx, #arg do
-- CMake variable
if arg[i]:match("^%-D(.-)=(.*)$") then
local variable, value = arg[i]:match("^%-D(.-)=(.*)$")
cmake_variables[variable] = value
-- LuaDist variable
elseif arg[i]:match("^%-(.-)=(.*)$") then
local variable, value = arg[i]:match("^%-(.-)=(.*)$")
apply_settings(variable, value)
-- LuaDist boolean variable with implicit 'true' value
elseif arg[i]:match("^%-(.-)$") then
local variable, value = arg[i]:match("^%-(.-)$")
apply_settings(variable, "true")
-- not a LuaDist or CMake variable
else
table.insert(items, arg[i])
end
end
end
-- run the required LuaDist functionality
-- ('commands' and 'apply_settings' are file-level names resolved at call time)
return commands[command].run(sys.abs_path(deploy_dir), items, cmake_variables)
end
-- Print information about Luadist (version, license, etc.).
-- Prints the LuaDist-git banner (name, version from cfg.version, license
-- notice) to stdout and returns 0.
function print_info()
print([[
LuaDist-git ]].. cfg.version .. [[ - Lua package manager for the LuaDist deployment system.
Released under the MIT License. See https://github.com/luadist/luadist-git
]])
return 0
end
-- Convenience function for printing the main luadist help.
-- Prints the top-level usage text by dispatching the "help" command
-- against the default deployment directory; returns its status (0).
function print_help()
return run_command(nil, "help")
end
-- Set the LuaDist 'variable' to the 'value'.
-- See available settings in 'dist.config' module.
-- Sets the LuaDist configuration option 'variable' to 'value' (a string
-- from the command line), coercing it to the type of the existing setting
-- in 'dist.config'. Exits the process with status 1 on an unknown option
-- or an unparsable value.
function apply_settings(variable, value)
  assert(type(variable) == "string", "luadist.apply_settings: Argument 'variable' is not a string.")
  assert(type(value) == "string", "luadist.apply_settings: Argument 'value' is not a string.")
  local current = cfg[variable]
  -- check whether the settings variable exists
  if current == nil then
    print("Unknown LuaDist configuration option: '" .. variable .. "'.")
    os.exit(1)
  end
  -- coerce the string value to the type of the existing setting
  local kind = type(current)
  if kind == "boolean" then
    local lowered = value:lower()
    if lowered == "true" or lowered == "yes" or lowered == "on" or lowered == "1" then
      value = true
    elseif lowered == "false" or lowered == "no" or lowered == "off" or lowered == "0" then
      value = false
    else
      print("Value of LuaDist option '" .. variable .. "' must be a boolean.")
      os.exit(1)
    end
  elseif kind == "number" then
    value = tonumber(value)
    if not value then
      print("Value of LuaDist option '" .. variable .. "' must be a number.")
      os.exit(1)
    end
  elseif kind == "table" then
    local err
    value, err = utils.make_table(value, ",")
    if not value then
      print("Error when parsing the LuaDist variable '" .. variable .. "': " .. err)
      os.exit(1)
    end
  end
  -- set the LuaDist variable
  cfg[variable] = value
end
-- Parse command line input and run the required command.
-- Parse command line input and run the required command.
-- debug.getlocal(4, 1) only succeeds when at least 4 stack levels exist,
-- i.e. this chunk was require()'d rather than executed directly.
if pcall(debug.getlocal, 4, 1) then
return commands -- return commands when used as module
elseif not commands[arg[1]] and commands[arg[2]] then
-- deploy_dir specified
return run_command(arg[1], arg[2], 3)
elseif commands[arg[1]] then
-- deploy_dir not specified
return run_command(dist.get_deploy_dir(), arg[1], 2)
else
-- unknown command
if arg[1] then
print("Unknown command '" .. arg[1] .. "'. Printing help...\n")
print_help()
os.exit(1)
end
-- no command at all: just print the help, exit status 0
return print_help()
end

View File

@ -0,0 +1,915 @@
-- luainspect.ast - Lua Abstract Syntax Tree (AST) and token list operations.
--
-- Two main structures are maintained. A Metalua-style AST represents the
-- nested syntactic structure obtained from the parse.
-- A separate linear ordered list of tokens represents the syntactic structure
-- from the lexing, including line information (character positions only not row/columns),
-- comments, and keywords, which is originally built from the lineinfo attributes
-- injected by Metalua into the AST (IMPROVE: it probably would be simpler
-- to obtain this from the lexer directly rather then inferring it from the parsing).
-- During AST manipulations, the lineinfo maintained in the AST is ignored
-- because it was found more difficult to maintain and not in the optimal format.
--
-- The contained code deals with
-- - Building the AST from source.
-- - Building the tokenlist from the AST lineinfo.
-- - Querying the AST+tokenlist.
-- - Modifying the AST+tokenlist (including incremental parsing source -> AST)
-- - Annotating the AST with navigational info (e.g. parent links) to assist queries.
-- - Dumping the tokenlist for debugging.
--
-- (c) 2010 David Manura, MIT License.
--! require 'luainspect.typecheck' (context)
local mlc = require 'metalua.compiler'.new()
local M = {}
--[=[TESTSUITE
-- utilities
local ops = {}
ops['=='] = function(a,b) return a == b end
local function check(opname, a, b)
local op = assert(ops[opname])
if not op(a,b) then
error("fail == " .. tostring(a) .. " " .. tostring(b))
end
end
--]=]
-- CATEGORY: debug
-- Prints its arguments (prefixed with 'DEBUG:') only when the global
-- LUAINSPECT_DEBUG flag is set; otherwise a no-op. Returns nothing.
local function DEBUG(...)
  if not LUAINSPECT_DEBUG then return end
  print('DEBUG:', ...)
end
-- Converts character position to row,column position in string src.
-- All values are 1-indexed.
-- Converts 1-indexed character position `pos` in string `src` to a
-- 1-indexed (line, column) pair.
function M.pos_to_linecol(pos, src)
  local line, last_eol = 1, 0
  -- walk the newline positions that precede (or equal) pos
  for eol in src:gmatch"()\n" do
    if eol > pos then break end
    line = line + 1
    last_eol = eol
  end
  return line, pos - last_eol
end
-- Removes any shebang ("#!") line from Lua source string.
-- CATEGORY: Lua parsing
-- Blanks out any shebang ("#!") first line of Lua source string `src`,
-- replacing it with spaces so character offsets are preserved.
function M.remove_shebang(src)
  local shebang = src:match("^#![^\r\n]*")
  if not shebang then return src end
  return (" "):rep(#shebang) .. src:sub(#shebang + 1)
end
-- Custom version of loadstring that parses out line number info
-- CATEGORY: Lua parsing
-- Returns the compiled chunk on success. On failure returns
--   nil, err, linenum, colnum, linenum2
-- where `err` has the [string ""] chunk-name prefix stripped, `linenum`
-- is the line reported in the error, `colnum` is always 0 (loadstring
-- reports no column), and `linenum2` is the line of the unclosed
-- construct for "'<tok>' expected (to close ...)" errors, else nil.
function M.loadstring(src)
local f, err = loadstring(src, "")
if f then
return f
else
err = err:gsub('^%[string ""%]:', "")
local linenum = assert(err:match("(%d+):"))
local colnum = 0
local linenum2 = err:match("^%d+: '[^']+' expected %(to close '[^']+' at line (%d+)")
return nil, err, linenum, colnum, linenum2
end
end
-- helper for ast_from_string. Raises on error.
-- FIX? filename currently ignored in Metalua
-- CATEGORY: Lua parsing
-- Thin wrapper around Metalua's src_to_ast so M.ast_from_string can pcall it.
local function ast_from_string_helper(src, filename)
return mlc:src_to_ast(src, filename)
end
-- Counts number of lines in text.
-- Warning: the decision of whether to count a trailing new-line in a file
-- or an empty file as a line is a little subjective. This function currently
-- defines the line count as 1 plus the number of new line characters.
-- CATEGORY: utility/string
-- Counts lines in `text`, defined as 1 plus the number of '\n' characters
-- (see the caveat in the comment above about trailing newlines).
local function linecount(text)
  local _, newlines = text:gsub('\n', '')
  return newlines + 1
end
-- Converts Lua source string to Lua AST (via mlp/gg).
-- CATEGORY: Lua parsing
-- Returns the AST on success. On failure returns
--   nil, err, linenum, colnum, linenum2 (linenum2 always nil here),
-- mirroring M.loadstring's error signature so callers can treat both
-- uniformly.
function M.ast_from_string(src, filename)
local ok, ast = pcall(ast_from_string_helper, src, filename)
if not ok then
local err = ast
-- keep only the first line of the error and strip the file/function prefix
err = err:match('[^\n]*')
err = err:gsub("^.-:%s*line", "line")
-- mlp.chunk prepending this is undesirable. error(msg,0) would be better in gg.lua. Reported.
-- TODO-Metalua: remove when fixed in Metalua.
local linenum, colnum = err:match("line (%d+), char (%d+)")
if not linenum then
-- Metalua libraries may return "...gg.lua:56: .../mlp_misc.lua:179: End-of-file expected"
-- without the normal line/char numbers given things like "if x then end end". Should be
-- fixed probably with gg.parse_error in _chunk in mlp_misc.lua.
-- TODO-Metalua: remove when fixed in Metalua.
linenum = linecount(src)
colnum = 1
end
local linenum2 = nil
return nil, err, linenum, colnum, linenum2
else
return ast
end
end
-- Simple comment parser. Returns Metalua-style comment.
-- CATEGORY: Lua lexing
-- Attempts to parse string `src` as exactly one whole Lua comment.
-- Returns a Metalua-style comment table {text, 1, #src, 'short'|'long'},
-- or nil if src is not a single comment. (See the new-line caveats in
-- the comments below this function.)
-- NOTE(review): the long-comment branch indexes a file-scope `lexer`
-- which is not among this file's visible require()s -- confirm it is in
-- scope, otherwise this path raises at runtime.
local function quick_parse_comment(src)
local s = src:match"^%-%-([^\n]*)()\n$"
if s then return {s, 1, #src, 'short'} end
local _, s = src:match(lexer.lexer.patterns.long_comment .. '\r?\n?$')
if s then return {s, 1, #src, 'long'} end
return nil
end
--FIX:check new-line correctness
--note: currently requiring \n at end of single line comment to avoid
-- incremental compilation with `--x\nf()` and removing \n from still
-- recognizing as comment `--x`.
-- currently allowing \r\n at end of long comment since Metalua includes
-- it in lineinfo of long comment (FIX:Metalua?)
-- Gets length of longest prefix string in both provided strings.
-- Returns max n such that text1:sub(1,n) == text2:sub(1,n) and n <= max(#text1,#text2)
-- CATEGORY: string utility
-- Binary-searches for the largest n such that
-- text1:sub(1,n) == text2:sub(1,n), with n <= min(#text1, #text2).
local function longest_prefix(text1, text2)
  local lo, hi = 0, math.min(#text1, #text2)
  while hi > lo do
    local mid = math.ceil((lo + hi) / 2)
    if text1:sub(1, mid) == text2:sub(1, mid) then
      lo = mid
    else
      hi = mid - 1
    end
  end
  return lo
end
-- Gets length of longest postfix string in both provided strings.
-- Returns max n such that text1:sub(-n) == text2:sub(-n) and n <= max(#text1,#text2)
-- CATEGORY: string utility
-- Binary-searches for the largest n such that
-- text1:sub(-n) == text2:sub(-n), with n <= min(#text1, #text2).
-- Mirror image of longest_prefix (differs only in the sub() direction).
local function longest_postfix(text1, text2)
  local lo, hi = 0, math.min(#text1, #text2)
  while hi > lo do
    local mid = math.ceil((lo + hi) / 2)
    if text1:sub(-mid) == text2:sub(-mid) then
      lo = mid
    else
      hi = mid - 1
    end
  end
  return lo
end
-- Determines AST node that must be re-evaluated upon changing code string from
-- `src` to `bsrc`, given previous top_ast/tokenlist/src.
-- Note: decorates top_ast as side-effect.
-- If preserve is true, then does not expand AST match even if replacement is invalid.
-- CATEGORY: AST/tokenlist manipulation
-- Returns srcm_fpos, srcm_lpos, bsrcm_fpos, bsrcm_lpos, mast, mtype:
-- the invalidated position range in `src`, the corresponding range in
-- `bsrc`, the matched node (AST node, comment table, or nil for
-- whitespace), and mtype in {'whitespace','comment','ast','statblock'}.
-- Returns nothing at all when src == bsrc (nothing to re-evaluate).
function M.invalidated_code(top_ast, tokenlist, src, bsrc, preserve)
-- Converts position range in src to position range in bsrc.
local function range_transform(src_fpos, src_lpos)
local src_nlpos = #src - src_lpos
local bsrc_fpos = src_fpos
local bsrc_lpos = #bsrc - src_nlpos
return bsrc_fpos, bsrc_lpos
end
if src == bsrc then return end -- up-to-date
-- Find range of positions in src that differences correspond to.
-- Note: for zero byte range, src_pos2 = src_pos1 - 1.
local npre = longest_prefix(src, bsrc)
local npost = math.min(#src-npre, longest_postfix(src, bsrc))
-- note: min avoids overlap ambiguity
local src_fpos, src_lpos = 1 + npre, #src - npost
-- Find smallest AST node containing src range above. May also
-- be contained in (smaller) comment or whitespace.
local match_ast, match_comment, iswhitespace =
M.smallest_ast_containing_range(top_ast, tokenlist, src_fpos, src_lpos)
DEBUG('invalidate-smallest:', match_ast and (match_ast.tag or 'notag'), match_comment, iswhitespace)
-- Determine which (ast, comment, or whitespace) to match, and get its pos range in src and bsrc.
local srcm_fpos, srcm_lpos, bsrcm_fpos, bsrcm_lpos, mast, mtype
if iswhitespace then
mast, mtype = nil, 'whitespace'
srcm_fpos, srcm_lpos = src_fpos, src_lpos
elseif match_comment then
mast, mtype = match_comment, 'comment'
srcm_fpos, srcm_lpos = match_comment.fpos, match_comment.lpos
else
mast, mtype = match_ast, 'ast'
-- Climb to the nearest ancestor that has a known position range
-- (some nodes, e.g. invisible ones, have none).
repeat
srcm_fpos, srcm_lpos = M.ast_pos_range(mast, tokenlist)
if not srcm_fpos then
if mast == top_ast then
srcm_fpos, srcm_lpos = 1, #src
break
else
M.ensure_parents_marked(top_ast)
mast = mast.parent
end
end
until srcm_fpos
end
bsrcm_fpos, bsrcm_lpos = range_transform(srcm_fpos, srcm_lpos)
-- Never expand match if preserve specified.
if preserve then
return srcm_fpos, srcm_lpos, bsrcm_fpos, bsrcm_lpos, mast, mtype
end
-- Determine if replacement could break parent nodes.
local isreplacesafe
if mtype == 'whitespace' then
if bsrc:sub(bsrcm_fpos, bsrcm_lpos):match'^%s*$' then -- replaced with whitespace
if bsrc:sub(bsrcm_fpos-1, bsrcm_lpos+1):match'%s' then -- not eliminating whitespace
isreplacesafe = true
end
end
elseif mtype == 'comment' then
local m2src = bsrc:sub(bsrcm_fpos, bsrcm_lpos)
DEBUG('invalidate-comment[' .. m2src .. ']')
if quick_parse_comment(m2src) then -- replaced with comment
isreplacesafe = true
end
end
if isreplacesafe then -- return on safe replacement
return srcm_fpos, srcm_lpos, bsrcm_fpos, bsrcm_lpos, mast, mtype
end
-- Find smallest containing statement block that will compile (or top_ast).
while 1 do
match_ast = M.get_containing_statementblock(match_ast, top_ast)
if match_ast == top_ast then
return 1,#src, 1, #bsrc, match_ast, 'statblock'
-- entire AST invalidated
end
local srcm_fpos, srcm_lpos = M.ast_pos_range(match_ast, tokenlist)
local bsrcm_fpos, bsrcm_lpos = range_transform(srcm_fpos, srcm_lpos)
local msrc = bsrc:sub(bsrcm_fpos, bsrcm_lpos)
DEBUG('invalidate-statblock:', match_ast and match_ast.tag, '[' .. msrc .. ']')
if loadstring(msrc) then -- compiled
return srcm_fpos, srcm_lpos, bsrcm_fpos, bsrcm_lpos, match_ast, 'statblock'
end
M.ensure_parents_marked(top_ast)
match_ast = match_ast.parent
end
end
-- Walks AST `ast` in arbitrary order, visiting each node `n`, executing `fdown(n)` (if specified)
-- when going down and `fup(n)` (if specified) when going up.
-- CATEGORY: AST walk
function M.walk(ast, fdown, fup)
assert(type(ast) == 'table')
if fdown then fdown(ast) end
for _,bast in ipairs(ast) do
if type(bast) == 'table' then
M.walk(bast, fdown, fup)
end
end
if fup then fup(ast) end
end
-- Replaces contents of table t1 with contents of table t2.
-- Does not change metatable (if any).
-- This function is useful for swapping one AST node with another
-- while preserving any references to the node.
-- CATEGORY: table utility
function M.switchtable(t1, t2)
  -- Empty t1, then shallow-copy every key/value pair of t2 into it.
  for key in pairs(t1) do
    t1[key] = nil
  end
  for key, val in pairs(t2) do
    t1[key] = val
  end
end
-- Inserts all elements in list bt at index i in list t.
-- CATEGORY: table utility
local function tinsertlist(t, i, bt)
  local n = #bt
  local old_len = #t
  -- Grow the array part first so it never contains nil holes.
  for k = old_len + 1, old_len + n do t[k] = false end
  -- Shift existing elements right to open a gap of n slots at i.
  for k = old_len, i, -1 do t[k + n] = t[k] end
  -- Copy the new elements into the gap.
  for k = 1, n do t[i + k - 1] = bt[k] end
end
--[=[TESTSUITE:
local function _tinsertlist(t, i, bt)
for bi=#bt,1,-1 do table.insert(t, i, bt[bi]) end
end -- equivalent but MUCH less efficient for large tables
local function _tinsertlist(t, i, bt)
for bi=1,#bt do table.insert(t, i+bi-1, bt[bi]) end
end -- equivalent but MUCH less efficient for large tables
local t = {}; tinsertlist(t, 1, {}); assert(table.concat(t)=='')
local t = {}; tinsertlist(t, 1, {2,3}); assert(table.concat(t)=='23')
local t = {4}; tinsertlist(t, 1, {2,3}); assert(table.concat(t)=='234')
local t = {2}; tinsertlist(t, 2, {3,4}); assert(table.concat(t)=='234')
local t = {4,5}; tinsertlist(t, 1, {2,3}); assert(table.concat(t)=='2345')
local t = {2,5}; tinsertlist(t, 2, {3,4}); assert(table.concat(t)=='2345')
local t = {2,3}; tinsertlist(t, 3, {4,5}); assert(table.concat(t)=='2345')
print 'DONE'
--]=]
-- Gets list of keyword positions related to node ast in source src.
-- Returns a flat list {fpos1, lpos1, fpos2, lpos2, ...} of byte ranges.
-- note: ast must be visible, i.e. have lineinfo (e.g. unlike `Id "self" definition).
-- Note: includes operators.
-- Note: Assumes ast Metalua-style lineinfo is valid.
-- CATEGORY: tokenlist build
function M.get_keywords(ast, src)
  local list = {}
  if not ast.lineinfo then return list end
  -- examine space between each pair of children i and j.
  -- special cases: 0 is before first child and #ast+1 is after last child
  -- Put children in lexical order.
  -- Some binary operations have arguments reversed from lexical order.
  -- For example, `a > b` becomes `Op{'lt', `Id 'b', `Id 'a'}
  -- FIX: compare byte positions numerically, not as strings; string
  -- comparison misorders positions with different digit counts ('9' > '10').
  local oast =
    (ast.tag == 'Op' and #ast == 3
     and tonumber(tostring(ast[2].lineinfo.first):match('|L(%d+)'))
       > tonumber(tostring(ast[3].lineinfo.first):match('|L(%d+)')))
    and {ast[1], ast[3], ast[2]} or ast
  local i = 0
  while i <= #ast do
    -- j is node following i that has lineinfo
    local j = i+1; while j < #ast+1 and not oast[j].lineinfo do j=j+1 end
    -- Get position range [fpos,lpos] between subsequent children.
    local fpos
    if i == 0 then -- before first child
      fpos = tonumber(tostring(ast.lineinfo.first):match('|L(%d+)'))
    else
      -- start just past child i (or past its trailing comment, if any)
      local last = oast[i].lineinfo.last; local c = last.comments
      fpos = (c and #c > 0 and c[#c][3] or tostring(last):match('|L(%d+)')) + 1
    end
    local lpos
    if j == #ast+1 then -- after last child
      lpos = tonumber(tostring(ast.lineinfo.last):match('|L(%d+)'))
    else
      -- end just before child j (or before its leading comment, if any)
      local first = oast[j].lineinfo.first; local c = first.comments
      lpos = (c and #c > 0 and c[1][2] or tostring(first):match('|L(%d+)')) - 1
    end
    -- Find keyword in range: try a word first, then a punctuation run.
    local spos = fpos
    repeat
      local mfpos, tok, mlppos = src:match("^%s*()(%a+)()", spos)
      if not mfpos then
        mfpos, tok, mlppos = src:match("^%s*()(%p+)()", spos)
      end
      if mfpos then
        local mlpos = mlppos-1
        if mlpos > lpos then mlpos = lpos end -- clip match to range
        if mlpos >= mfpos then
          list[#list+1] = mfpos
          list[#list+1] = mlpos
        end
      end
      spos = mlppos
    until not spos or spos > lpos
    -- note: finds single keyword. in `local function` returns only `local`
    i = j -- next
    --DESIGN:Lua: comment: string.match accepts a start position but not a stop position
  end
  return list
end
-- Q:Metalua: does ast.lineinfo[loc].comments imply #ast.lineinfo[loc].comments > 0 ?

-- Generates ordered list of tokens in top_ast/src.
-- Note: currently ignores operators and parens.
-- Note: Modifies ast.
-- Note: Assumes ast Metalua-style lineinfo is valid.
-- CATEGORY: AST/tokenlist query
-- AST tags that act as leaf tokens in the token stream.
-- FIX: removed duplicate Dots=true entry from the table constructor.
local isterminal = {Nil=true, Dots=true, True=true, False=true, Number=true,
  String=true, Id=true}
-- Orders tokens by their first byte position (comparator for table.sort).
local function compare_tokens_(atoken, btoken) return atoken.fpos < btoken.fpos end
-- Builds the tokenlist for top_ast/src, sorted by byte position.
function M.ast_to_tokenlist(top_ast, src)
  local tokens = {} -- {nbytes=#src}
  local isseen = {} -- comment objects already emitted (the same comment may be attached to several nodes)
  M.walk(top_ast, function(ast)
    if isterminal[ast.tag] then -- Extract terminal
      local token = ast -- the AST node itself doubles as the token object
      if ast.lineinfo then
        -- Metalua lineinfo positions tostring to a form containing '|L<bytepos>'.
        token.fpos = tonumber(tostring(ast.lineinfo.first):match('|L(%d+)'))
        token.lpos = tonumber(tostring(ast.lineinfo.last):match('|L(%d+)'))
        token.ast = ast
        table.insert(tokens, token)
      end
    else -- Extract non-terminal
      -- get_keywords returns a flat list of (fpos, lpos) pairs.
      local keywordposlist = M.get_keywords(ast, src)
      for i=1,#keywordposlist,2 do
        local fpos, lpos = keywordposlist[i], keywordposlist[i+1]
        local toksrc = src:sub(fpos, lpos)
        local token = {tag='Keyword', fpos=fpos, lpos=lpos, ast=ast, toksrc}
        table.insert(tokens, token)
      end
    end
    -- Extract comments attached before ('first') and after ('last') the node.
    for i=1,2 do
      local comments = ast.lineinfo and ast.lineinfo[i==1 and 'first' or 'last'].comments
      if comments then for _, comment in ipairs(comments) do
        if not isseen[comment] then
          comment.tag = 'Comment' -- decorate the comment object in place (modifies ast)
          local token = comment
          token.fpos = tonumber(tostring(comment.lineinfo.first):match('|L(%d+)'))
          token.lpos = tonumber(tostring(comment.lineinfo.last):match('|L(%d+)'))
          token.ast = comment
          table.insert(tokens, token)
          isseen[comment] = true
        end
      end end
    end
  end, nil)
  table.sort(tokens, compare_tokens_)
  return tokens
end
-- Gets tokenlist range [fidx,lidx] covered by ast. Returns nil,nil if not found.
--FIX:PERFORMANCE:this is slow on large files.
-- CATEGORY: AST/tokenlist query
function M.ast_idx_range_in_tokenlist(tokenlist, ast)
  -- Mark every node in the subtree rooted at ast.
  local in_subtree = {}
  M.walk(ast, function(node) in_subtree[node] = true end)
  -- Scan for the first and last tokens owned by a marked node.
  local first_idx, last_idx
  for idx = 1, #tokenlist do
    if in_subtree[tokenlist[idx].ast] then
      first_idx = first_idx or idx
      last_idx = idx
    end
  end
  return first_idx, last_idx
end
-- Gets index range in tokenlist overlapped by character position range [fpos, lpos].
-- For example, `do ff() end` with range ` ff() ` would match tokens `ff()`.
-- Tokens partly inside range are counted, so range `f()` would match tokens `ff()`.
-- If lidx = fidx - 1, then position range is whitespace between tokens lidx (on left)
-- and fidx (on right), and this may include token pseudoindices 0 (start of file) and
-- #tokenlist+1 (end of file).
-- Note: lpos == fpos - 1 indicates zero-width range between chars lpos and fpos.
-- CATEGORY: tokenlist query
function M.tokenlist_idx_range_over_pos_range(tokenlist, fpos, lpos)
  -- Pass 1: find the first and last tokens that overlap [fpos, lpos],
  -- counting even partial overlaps.
  local first_idx, last_idx
  for idx = 1, #tokenlist do
    local tok = tokenlist[idx]
    if fpos <= tok.lpos and lpos >= tok.fpos then
      first_idx = first_idx or idx
      last_idx = idx
    end
  end
  if first_idx then
    return first_idx, last_idx
  end
  -- Pass 2: nothing overlapped, so locate the inter-token gap (between
  -- indices idx-1 and idx) that contains the range, including the
  -- pseudo-gaps before the first token and after the last one.
  for idx = 1, #tokenlist + 1 do
    local left, right = tokenlist[idx-1], tokenlist[idx]
    if (not left or fpos > left.lpos) and (not right or lpos < right.fpos) then
      return idx, idx - 1
    end
  end
  return nil, nil
end
--[=[TESTSUITE
local function test(...)
return table.concat({M.tokenlist_idx_range_over_pos_range(...)}, ',')
end
check('==', test({}, 2, 2), "1,0") -- no tokens
check('==', test({{tag='Id', fpos=1, lpos=1}}, 2, 2), "2,1") -- right of one token
check('==', test({{tag='Id', fpos=3, lpos=3}}, 2, 2), "1,0") -- left of one token
check('==', test({{tag='Id', fpos=3, lpos=4}}, 2, 3), "1,1") -- left partial overlap one token
check('==', test({{tag='Id', fpos=3, lpos=4}}, 4, 5), "1,1") -- right partial overlap one token
check('==', test({{tag='Id', fpos=3, lpos=6}}, 4, 5), "1,1") -- partial inner overlap one token
check('==', test({{tag='Id', fpos=3, lpos=6}}, 3, 6), "1,1") -- exact overlap one token
check('==', test({{tag='Id', fpos=4, lpos=5}}, 3, 6), "1,1") -- extra overlap one token
check('==', test({{tag='Id', fpos=2, lpos=3}, {tag='Id', fpos=5, lpos=6}}, 4, 4), "2,1") -- between tokens, " " exact
check('==', test({{tag='Id', fpos=2, lpos=3}, {tag='Id', fpos=5, lpos=6}}, 4, 3), "2,1") -- between tokens, "" on left
check('==', test({{tag='Id', fpos=2, lpos=3}, {tag='Id', fpos=5, lpos=6}}, 5, 4), "2,1") -- between tokens, "" on right
check('==', test({{tag='Id', fpos=2, lpos=3}, {tag='Id', fpos=4, lpos=5}}, 4, 3), "2,1") -- between tokens, "" exact
--]=]
-- Removes tokens in tokenlist covered by ast.
-- CATEGORY: tokenlist manipulation
local function remove_ast_in_tokenlist(tokenlist, ast)
  local first_idx, last_idx = M.ast_idx_range_in_tokenlist(tokenlist, ast)
  if not first_idx then return end -- ast owns no tokens here
  -- Remove back-to-front so pending indices stay valid during removal.
  for idx = last_idx, first_idx, -1 do
    table.remove(tokenlist, idx)
  end
end
-- Inserts tokens from btokenlist into tokenlist. Preserves sort.
-- CATEGORY: tokenlist manipulation
local function insert_tokenlist(tokenlist, btokenlist)
  local first_new = btokenlist[1]
  if not first_new then return end -- nothing to insert
  -- Find the first existing token that starts after the new tokens;
  -- insert there, or append if none does.
  local at
  for idx = 1, #tokenlist do
    if tokenlist[idx].fpos > first_new.fpos then
      at = idx
      break
    end
  end
  tinsertlist(tokenlist, at or #tokenlist + 1, btokenlist)
end
-- Get character position range covered by ast in tokenlist. Returns nil,nil on not found.
-- CATEGORY: AST/tokenlist query
function M.ast_pos_range(ast, tokenlist) -- IMPROVE:style: ast_idx_range_in_tokenlist has params reversed
  local first_idx, last_idx = M.ast_idx_range_in_tokenlist(tokenlist, ast)
  if not first_idx then
    return nil, nil
  end
  return tokenlist[first_idx].fpos, tokenlist[last_idx].lpos
end
-- Gets string representation of AST node. nil if none.
-- IMPROVE: what if node is empty block?
-- CATEGORY: AST/tokenlist query
function M.ast_to_text(ast, tokenlist, src) -- IMPROVE:style: ast_idx_range_in_tokenlist has params reversed
  local from, to = M.ast_pos_range(ast, tokenlist)
  if not from then return nil end
  return src:sub(from, to)
end
-- Gets smallest AST node in top_ast/tokenlist/src
-- completely containing position range [pos1, pos2].
-- careful: "function" is not part of the `Function node.
-- If range is inside comment, returns comment also (second return value).
-- If range is inside whitespace, then returns true in third return value.
-- CATEGORY: AST/tokenlist query
function M.smallest_ast_containing_range(top_ast, tokenlist, pos1, pos2)
  local f0idx, l0idx = M.tokenlist_idx_range_over_pos_range(tokenlist, pos1, pos2)
  -- Find enclosing AST.
  M.ensure_parents_marked(top_ast)
  local fidx, lidx = f0idx, l0idx
  -- Step outward past tokens whose .ast is not linked into the parent chain
  -- (comment tokens carry the comment object as .ast, which mark_parents never
  -- visits), so common_ast_parent below receives tree-linked nodes.
  while tokenlist[fidx] and not tokenlist[fidx].ast.parent do fidx = fidx - 1 end
  while tokenlist[lidx] and not tokenlist[lidx].ast.parent do lidx = lidx + 1 end
  -- DEBUG(fidx, lidx, f0idx, l0idx, #tokenlist, pos1, pos2, tokenlist[fidx], tokenlist[lidx])
  -- If we ran off either end of the tokenlist, fall back to the root node.
  local ast = not (tokenlist[fidx] and tokenlist[lidx]) and top_ast or
    M.common_ast_parent(tokenlist[fidx].ast, tokenlist[lidx].ast, top_ast)
  -- DEBUG('m2', tokenlist[fidx], tokenlist[lidx], top_ast, ast, ast and ast.tag)
  if l0idx == f0idx - 1 then -- whitespace: range fell between two tokens
    return ast, nil, true
  elseif l0idx == f0idx and tokenlist[l0idx].tag == 'Comment' then
    return ast, tokenlist[l0idx], nil
  else
    return ast, nil, nil
  end
end
--IMPROVE: handle string edits and maybe others
-- Gets smallest statement block containing position pos or
-- nearest statement block before pos, whichever is smaller, given ast/tokenlist.
-- Returns the block and a flag that is true when pos appears to fall after it.
function M.current_statementblock(ast, tokenlist, pos)
  local fidx,lidx = M.tokenlist_idx_range_over_pos_range(tokenlist, pos, pos)
  if fidx > lidx then fidx = lidx end -- use nearest backward (pos was between tokens)
  -- Find closest AST node backward, skipping comment tokens.
  while fidx >= 1 and tokenlist[fidx].tag == 'Comment' do fidx=fidx-1 end
  if fidx < 1 then return ast, false end -- nothing but comments (or nothing) before pos
  local mast = tokenlist[fidx].ast
  if not mast then return ast, false end
  mast = M.get_containing_statementblock(mast, ast)
  local isafter = false
  if mast.tag2 ~= 'Block' then
    local mfidx,mlidx = M.ast_idx_range_in_tokenlist(tokenlist, mast)
    -- NOTE(review): mlidx is a token *index* while pos is a character
    -- position; comparing them directly looks suspect -- possibly
    -- tokenlist[mlidx].lpos was intended. Confirm against callers
    -- before changing.
    if pos > mlidx then
      isafter = true
    end
  end
  return mast, isafter
end
-- Gets index of bast in ast (nil if not found).
-- CATEGORY: AST query
function M.ast_idx(ast, bast)
  for i = 1, #ast do
    if ast[i] == bast then
      return i
    end
  end
  return nil
end
-- Gets parent of ast and index of ast in parent.
-- Root node top_ast must also be provided. Returns nil, nil if ast is root.
-- Note: may call mark_parents.
-- CATEGORY: AST query
function M.ast_parent_idx(top_ast, ast)
  if ast == top_ast then return nil, nil end -- root has no parent
  M.ensure_parents_marked(top_ast)
  assert(ast.parent)
  return ast.parent, M.ast_idx(ast.parent, ast)
end
-- Gets common parent of aast and bast. Always returns value.
-- Must provide root top_ast too.
-- CATEGORY: AST query
function M.common_ast_parent(aast, bast, top_ast)
  M.ensure_parents_marked(top_ast)
  -- Record bast's ancestor chain (including bast itself).
  local on_b_chain = {}
  local node = bast
  while node do
    on_b_chain[node] = true
    node = node.parent
  end
  -- Walk up from aast until a node on bast's chain is found.
  local cand = aast
  while cand do
    if on_b_chain[cand] then return cand end
    cand = cand.parent
  end
  assert(false) -- unreachable: top_ast is an ancestor of every node
end
-- Replaces old_ast with new_ast/new_tokenlist in top_ast/tokenlist.
-- Note: assumes new_ast is a block. assumes old_ast is a statement or block.
-- CATEGORY: AST/tokenlist
function M.replace_statements(top_ast, tokenlist, old_ast, new_ast, new_tokenlist)
  -- Update the token stream first: drop old_ast's tokens, splice in the new ones.
  remove_ast_in_tokenlist(tokenlist, old_ast)
  insert_tokenlist(tokenlist, new_tokenlist)
  if old_ast == top_ast then -- special case: no parent
    M.switchtable(old_ast, new_ast) -- note: safe since block is not in tokenlist.
  else
    -- Replace the single child old_ast with all elements of block new_ast.
    local parent_ast, idx = M.ast_parent_idx(top_ast, old_ast)
    table.remove(parent_ast, idx)
    tinsertlist(parent_ast, idx, new_ast)
  end
  -- fixup annotations (tag2 context and parent links) on the spliced nodes
  for _,bast in ipairs(new_ast) do
    if top_ast.tag2 then M.mark_tag2(bast, bast.tag == 'Do' and 'StatBlock' or 'Block') end
    if old_ast.parent then M.mark_parents(bast, old_ast.parent) end
  end
end
-- Adjusts lineinfo in tokenlist.
-- All char positions starting at pos1 are shifted by delta number of chars.
-- CATEGORY: tokenlist
function M.adjust_lineinfo(tokenlist, pos1, delta)
  for _, tok in ipairs(tokenlist) do
    if tok.fpos >= pos1 then tok.fpos = tok.fpos + delta end
    if tok.lpos >= pos1 then tok.lpos = tok.lpos + delta end
  end
  --tokenlist.nbytes = tokenlist.nbytes + delta
end
-- For each node n in ast, sets n.parent to parent node of n.
-- Assumes ast.parent will be parent_ast (may be nil)
-- CATEGORY: AST query
function M.mark_parents(ast, parent_ast)
  ast.parent = parent_ast
  for i = 1, #ast do
    local child = ast[i]
    if type(child) == 'table' then
      M.mark_parents(child, ast)
    end
  end
end
-- Calls mark_parents(ast) if ast not marked.
-- (Checks only the first child as a proxy for the whole tree.)
-- CATEGORY: AST query
function M.ensure_parents_marked(ast)
  local first = ast[1]
  if first and not first.parent then
    M.mark_parents(ast)
  end
end
-- For each node n in ast, sets n.tag2 to context string:
--   'Block' - node is block
--   'Stat' - node is statement
--   'StatBlock' - node is statement and block (i.e. `Do)
--   'Exp' - node is expression
--   'Explist' - node is expression list (or identifier list)
--   'Pair' - node is key-value pair in table constructor
-- note: ast.tag2 will be set to context.
-- CATEGORY: AST query
-- Tags that are unambiguously statements regardless of surrounding context.
local iscertainstat = {Do=true, Set=true, While=true, Repeat=true, If=true,
  Fornum=true, Forin=true, Local=true, Localrec=true, Return=true, Break=true}
function M.mark_tag2(ast, context)
  context = context or 'Block' -- the root is treated as a block
  ast.tag2 = context
  for i,bast in ipairs(ast) do
    if type(bast) == 'table' then
      local nextcontext
      if bast.tag == 'Do' then
        nextcontext = 'StatBlock'
      elseif iscertainstat[bast.tag] then
        nextcontext = 'Stat'
      elseif bast.tag == 'Call' or bast.tag == 'Invoke' then
        -- A call directly inside a block is a statement; elsewhere an expression.
        nextcontext = context == 'Block' and 'Stat' or 'Exp'
        --DESIGN:Metalua: these calls actually contain expression lists,
        --  but the expression list is not represented as a complete node
        --  by Metalua (as blocks are in `Do statements)
      elseif bast.tag == 'Pair' then
        nextcontext = 'Pair'
      elseif not bast.tag then
        -- Untagged child table: an explist for assignment-like parents
        -- (and the first two Forin children / first Function child),
        -- otherwise a nested block.
        if ast.tag == 'Set' or ast.tag == 'Local' or ast.tag == 'Localrec'
            or ast.tag == 'Forin' and i <= 2
            or ast.tag == 'Function' and i == 1
        then
          nextcontext = 'Explist'
        else
          nextcontext = 'Block'
        end
      else
        nextcontext = 'Exp'
      end
      M.mark_tag2(bast, nextcontext)
    end
  end
end
-- Gets smallest statement or block containing or being `ast`.
-- The AST root node `top_ast` must also be provided.
-- Note: may decorate AST as side-effect (mark_tag2/mark_parents).
-- top_ast is assumed a block, so this is always successful.
-- CATEGORY: AST query
function M.get_containing_statementblock(ast, top_ast)
  if not top_ast.tag2 then M.mark_tag2(top_ast) end
  local t2 = ast.tag2
  if t2 == 'Stat' or t2 == 'StatBlock' or t2 == 'Block' then
    return ast
  end
  -- Not a statement/block: climb toward the root and retry.
  M.ensure_parents_marked(top_ast)
  return M.get_containing_statementblock(ast.parent, top_ast)
end
-- Finds smallest statement, block, or comment AST in ast/tokenlist containing position
-- range [fpos, lpos]. If allowexpand is true (default nil) and located AST
-- coincides with position range, then next containing statement is used
-- instead (this allows multiple calls to further expand the statement selection).
-- Returns the selection's position range (nfpos, nlpos).
-- CATEGORY: AST query
function M.select_statementblockcomment(ast, tokenlist, fpos, lpos, allowexpand)
--IMPROVE: rename ast to top_ast
  local match_ast, comment_ast = M.smallest_ast_containing_range(ast, tokenlist, fpos, lpos)
  -- Prefer the comment when the range sits inside one; otherwise snap to
  -- the enclosing statement/block.
  local select_ast = comment_ast or M.get_containing_statementblock(match_ast, ast)
  local nfpos, nlpos = M.ast_pos_range(select_ast, tokenlist)
  --DEBUG('s', nfpos, nlpos, fpos, lpos, match_ast.tag, select_ast.tag)
  if allowexpand and fpos == nfpos and lpos == nlpos then
    if comment_ast then
      -- Select enclosing statement.
      select_ast = match_ast
      nfpos, nlpos = M.ast_pos_range(select_ast, tokenlist)
    else
      -- note: multiple times may be needed to expand selection. For example, in
      --   `for x=1,2 do f() end` both the statement `f()` and block `f()` have
      --   the same position range.
      M.ensure_parents_marked(ast)
      while select_ast.parent and fpos == nfpos and lpos == nlpos do
        select_ast = M.get_containing_statementblock(select_ast.parent, ast)
        nfpos, nlpos = M.ast_pos_range(select_ast, tokenlist)
      end
    end
  end
  return nfpos, nlpos
end
-- Converts tokenlist to string representation for debugging.
-- One line per token: index, [fpos,lpos], token text, owning AST tag.
-- CATEGORY: tokenlist debug
function M.dump_tokenlist(tokenlist)
  local lines = {}
  for i, token in ipairs(tokenlist) do
    lines[#lines+1] = 'tok.' .. i .. ': [' .. token.fpos .. ',' .. token.lpos .. '] '
      .. tostring(token[1]) .. ' ' .. tostring(token.ast.tag)
  end
  return table.concat(lines, '\n') -- .. 'nbytes=' .. tokenlist.nbytes .. '\n'
end
--FIX:Q: does this handle Unicode ok?
--FIX?:Metalua: fails on string with escape sequence '\/'. The Reference Manual
-- doesn't say this sequence is valid though.
--FIX:Metalua: In `local --[[x]] function --[[y]] f() end`,
-- 'x' comment omitted from AST.
--FIX:Metalua: `do --[[x]] end` doesn't generate comments in AST.
-- `if x then --[[x]] end` and `while 1 do --[[x]] end` generates
-- comments in first/last of block
--FIX:Metalua: `--[[x]] f() --[[y]]` returns lineinfo around `f()`.
-- `--[[x]] --[[y]]` returns lineinfo around everything.
--FIX:Metalua: `while 1 do --[[x]] --[[y]] end` returns first > last
-- lineinfo for contained block
--FIX:Metalua: search for "PATCHED:LuaInspect" in the metalualib folder.
--FIX?:Metalua: loadstring parses "--x" but metalua omits the comment in the AST
--FIX?:Metalua: `local x` is generating `Local{{`Id{x}}, {}}`, which
-- has no lineinfo on {}. This is contrary to the Metalua
-- spec: `Local{ {ident+} {expr+}? }.
-- Other things like `self` also generate no lineinfo.
-- The ast2.lineinfo above avoids this.
--FIX:Metalua: Metalua shouldn't overwrite ipairs/pairs. Note: Metalua version
-- doesn't set errorlevel correctly.
--Q:Metalua: Why does `return --[[y]] z --[[x]]` have
-- lineinfo.first.comments, lineinfo.last.comments,
-- plus lineinfo.comments (which is the same as lineinfo.first.comments) ?
--CAUTION:Metalua: `do f() end` returns lineinfo around `do f() end`, while
-- `while 1 do f() end` returns lineinfo around `f()` for inner block.
--CAUTION:Metalua: The lineinfo on Metalua comments is inconsistent with other
-- nodes
--CAUTION:Metalua: lineinfo of table in `f{}` is [3,2], of `f{ x,y }` it's [4,6].
-- This is inconsistent with `x={}` which is [3,4] and `f""` which is [1,2]
-- for the string.
--CAUTION:Metalua: only the `function()` form of `Function includes `function`
-- in lineinfo. 'function' is part of `Localrec and `Set in syntactic sugar form.
--[=[TESTSUITE
-- test longest_prefix/longest_postfix
local function pr(text1, text2)
local lastv
local function same(v)
assert(not lastv or v == lastv); lastv = v; return v
end
local function test1(text1, text2) -- test prefix/postfix
same(longest_prefix(text1, text2))
same(longest_postfix(text1:reverse(), text2:reverse()))
end
local function test2(text1, text2) -- test swap
test1(text1, text2)
test1(text2, text1)
end
for _,extra in ipairs{"", "x", "xy", "xyz"} do -- test extra chars
test2(text1, text2..extra)
test2(text2, text1..extra)
end
return lastv
end
check('==', pr("",""), 0)
check('==', pr("a",""), 0)
check('==', pr("a","a"), 1)
check('==', pr("ab",""), 0)
check('==', pr("ab","a"), 1)
check('==', pr("ab","ab"), 2)
check('==', pr("abcdefg","abcdefgh"), 7)
--]=]
--[=[TESTSUITE
print 'DONE'
--]=]
return M

View File

@ -0,0 +1,390 @@
--[[
compat_env v$(_VERSION) - Lua 5.1/5.2 environment compatibility functions
SYNOPSIS
-- Get load/loadfile compatibility functions only if using 5.1.
local CL = pcall(load, '') and _G or require 'compat_env'
local load = CL.load
local loadfile = CL.loadfile
-- The following now works in both Lua 5.1 and 5.2:
assert(load('return 2*pi', nil, 't', {pi=math.pi}))()
assert(loadfile('ex.lua', 't', {print=print}))()
-- Get getfenv/setfenv compatibility functions only if using 5.2.
local getfenv = _G.getfenv or require 'compat_env'.getfenv
local setfenv = _G.setfenv or require 'compat_env'.setfenv
local function f() return x end
setfenv(f, {x=2})
print(x, getfenv(f).x) --> 2, 2
DESCRIPTION
This module provides Lua 5.1/5.2 environment related compatibility functions.
This includes implementations of Lua 5.2 style `load` and `loadfile`
for use in Lua 5.1. It also includes Lua 5.1 style `getfenv` and `setfenv`
for use in Lua 5.2.
API
local CL = require 'compat_env'
CL.load (ld [, source [, mode [, env] ] ]) --> f [, err]
This behaves the same as the Lua 5.2 `load` in both
Lua 5.1 and 5.2.
http://www.lua.org/manual/5.2/manual.html#pdf-load
CL.loadfile ([filename [, mode [, env] ] ]) --> f [, err]
This behaves the same as the Lua 5.2 `loadfile` in both
Lua 5.1 and 5.2.
http://www.lua.org/manual/5.2/manual.html#pdf-loadfile
CL.getfenv ([f]) --> t
This is identical to the Lua 5.1 `getfenv` in Lua 5.1.
This behaves similar to the Lua 5.1 `getfenv` in Lua 5.2.
When a global environment is to be returned, or when `f` is a
C function, this returns `_G` since Lua 5.2 doesn't have
(thread) global and C function environments. This will also
return `_G` if the Lua function `f` lacks an `_ENV`
upvalue, but it will raise an error if uncertain due to lack of
debug info. It is not normally considered good design to use
this function; when possible, use `load` or `loadfile` instead.
http://www.lua.org/manual/5.1/manual.html#pdf-getfenv
CL.setfenv (f, t)
This is identical to the Lua 5.1 `setfenv` in Lua 5.1.
This behaves similar to the Lua 5.1 `setfenv` in Lua 5.2.
This will do nothing if `f` is a Lua function that
lacks an `_ENV` upvalue, but it will raise an error if uncertain
due to lack of debug info. See also Design Notes below.
It is not normally considered good design to use
this function; when possible, use `load` or `loadfile` instead.
http://www.lua.org/manual/5.1/manual.html#pdf-setfenv
DESIGN NOTES
This module intends to provide robust and fairly complete reimplementations
of the environment related Lua 5.1 and Lua 5.2 functions.
No effort is made, however, to simulate rare or difficult to simulate features,
such as thread environments, although this is liable to change in the future.
Such 5.1 capabilities are discouraged and ideally
removed from 5.1 code, thereby allowing your code to work in both 5.1 and 5.2.
In Lua 5.2, a `setfenv(f, {})`, where `f` lacks any upvalues, will be silently
ignored since there is no `_ENV` in this function to write to, and the
environment will have no effect inside the function anyway. However,
this does mean that `getfenv(setfenv(f, t))` does not necessarily equal `t`,
which is incompatible with 5.1 code (a possible workaround would be [1]).
If `setfenv(f, {})` has an upvalue but no debug info, then this will raise
an error to prevent inadvertently executing potentially untrusted code in the
global environment.
It is not normally considered good design to use `setfenv` and `getfenv`
(one reason they were removed in 5.2). When possible, consider replacing
these with `load` or `loadfile`, which are more restrictive and have native
implementations in 5.2.
This module might be merged into a more general Lua 5.1/5.2 compatibility
library (e.g. a full reimplementation of Lua 5.2 `_G`). However,
 `load/loadfile/getfenv/setfenv` are perhaps among the functions that are
 most cumbersome to do without.
INSTALLATION
Download compat_env.lua:
wget https://raw.github.com/gist/1654007/compat_env.lua
Copy compat_env.lua into your LUA_PATH.
Alternately, unpack, test, and install into LuaRocks:
wget https://raw.github.com/gist/1422205/sourceunpack.lua
lua sourceunpack.lua compat_env.lua
(cd out && luarocks make)
Related work
http://lua-users.org/wiki/LuaVersionCompatibility
https://github.com/stevedonovan/Penlight/blob/master/lua/pl/utils.lua
- penlight implementations of getfenv/setfenv
http://lua-users.org/lists/lua-l/2010-06/msg00313.html
- initial getfenv/setfenv implementation
References
[1] http://lua-users.org/lists/lua-l/2010-06/msg00315.html
Copyright
(c) 2012 David Manura. Licensed under the same terms as Lua 5.1/5.2 (MIT license).
Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in
all copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
THE SOFTWARE.
--]]---------------------------------------------------------------------
local M = {_TYPE='module', _NAME='compat_env', _VERSION='0.2.20120124'}
-- Validates a chunk string against a load mode ('b', 't', or 'bt'; nil
-- defaults to 'bt').  Returns true on success, or nil plus an error message.
local function check_chunk_type(s, mode)
  local effective_mode = mode or 'bt'
  -- A precompiled Lua chunk begins with the ESC byte (27).
  local binary = s and #s > 0 and s:byte(1) == 27
  if binary and not effective_mode:match'b' then
    return nil, ("attempt to load a binary chunk (mode is '%s')"):format(mode)
  end
  if not binary and not effective_mode:match't' then
    return nil, ("attempt to load a text chunk (mode is '%s')"):format(mode)
  end
  return true
end
-- True when the native `load` already accepts a string chunk (the 5.2+
-- signature); 5.1's native `load` only accepts a reader function.
local IS_52_LOAD = pcall(load, '')
if IS_52_LOAD then
  -- Native functions already conform to the 5.2 API; re-export them.
  M.load = _G.load
  M.loadfile = _G.loadfile
else
  -- 5.2 style `load` implemented in 5.1.
  -- ld: chunk string or reader function; source: chunk name for errors;
  -- mode: 'b'/'t'/'bt' (default 'bt'); env: environment for the chunk.
  -- Returns the compiled function, or nil plus an error message.
  function M.load(ld, source, mode, env)
    local f
    if type(ld) == 'string' then
      local s = ld
      local ok, err = check_chunk_type(s, mode); if not ok then return ok, err end
      local err; f, err = loadstring(s, source); if not f then return f, err end
    elseif type(ld) == 'function' then
      local ld2 = ld
      if (mode or 'bt') ~= 'bt' then
        -- Mode restriction requires peeking at the first piece; wrap the
        -- reader so that piece is still delivered to the underlying load.
        local first = ld()
        local ok, err = check_chunk_type(first, mode); if not ok then return ok, err end
        ld2 = function()
          if first then
            local chunk=first; first=nil; return chunk
          else return ld() end
        end
      end
      local err; f, err = load(ld2, source); if not f then return f, err end
    else
      error(("bad argument #1 to 'load' (function expected, got %s)"):format(type(ld)), 2)
    end
    if env then setfenv(f, env) end -- 5.1 setfenv emulates the 5.2 env parameter
    return f
  end
  -- 5.2 style `loadfile` implemented in 5.1.
  -- NOTE(review): with an explicit non-'bt' mode, filename=nil (stdin)
  -- is not handled on this path since io.open requires a name -- confirm
  -- whether that case matters to callers.
  function M.loadfile(filename, mode, env)
    if (mode or 'bt') ~= 'bt' then
      -- Must read the file ourselves (binary-safe) to enforce the mode.
      local ioerr
      local fh, err = io.open(filename, 'rb'); if not fh then return fh, err end
      local function ld() local chunk; chunk,ioerr = fh:read(4096); return chunk end
      local f, err = M.load(ld, filename and '@'..filename, mode, env)
      fh:close()
      if not f then return f, err end
      if ioerr then return nil, ioerr end -- surface a read error seen by ld
      return f
    else
      local f, err = loadfile(filename); if not f then return f, err end
      if env then setfenv(f, env) end
      return f
    end
  end
end
if _G.setfenv then -- Lua 5.1: native functions already behave as documented.
  M.setfenv = _G.setfenv
  M.getfenv = _G.getfenv
else -- >= Lua 5.2
  -- helper function for `getfenv`/`setfenv`:
  -- scans f's upvalues for `_ENV`.  Returns the upvalue index (or false if
  -- absent), its current value, and whether any upvalue name was unreadable.
  local function envlookup(f)
    local name, val
    local up = 0
    local unknown
    repeat
      up=up+1; name, val = debug.getupvalue(f, up)
      if name == '' then unknown = true end -- stripped debug info hides names
    until name == '_ENV' or name == nil
    if name ~= '_ENV' then
      up = nil
      if unknown then error("upvalues not readable in Lua 5.2 when debug info missing", 3) end
    end
    return (name == '_ENV') and up, val, unknown
  end
  -- helper function for `getfenv`/`setfenv`:
  -- resolves a numeric stack level to a function and validates the argument.
  -- `name` is the public function name used in error messages.
  local function envhelper(f, name)
    if type(f) == 'number' then
      if f < 0 then
        error(("bad argument #1 to '%s' (level must be non-negative)"):format(name), 3)
      elseif f < 1 then
        error("thread environments unsupported in Lua 5.2", 3) --[*]
      end
      f = debug.getinfo(f+2, 'f').func -- +2 skips envhelper and its caller
    elseif type(f) ~= 'function' then
      -- FIX: format arguments were garbled as type(name, f), which reported
      -- the wrong type and supplied one value for two %s directives.
      error(("bad argument #1 to '%s' (number expected, got %s)"):format(name, type(f)), 2)
    end
    return f
  end
  -- [*] might simulate with table keyed by coroutine.running()
  -- 5.1 style `setfenv` implemented in 5.2
  function M.setfenv(f, t)
    local f = envhelper(f, 'setfenv')
    local up, val, unknown = envlookup(f)
    if up then
      debug.upvaluejoin(f, up, function() return up end, 1) -- unique upvalue [*]
      debug.setupvalue(f, up, t)
    else
      local what = debug.getinfo(f, 'S').what
      if what ~= 'Lua' and what ~= 'main' then -- not Lua func
        error("'setfenv' cannot change environment of given object", 2)
      end -- else ignore no _ENV upvalue (warning: incompatible with 5.1)
    end
  end
  -- [*] http://lua-users.org/lists/lua-l/2010-06/msg00313.html
  -- 5.1 style `getfenv` implemented in 5.2
  function M.getfenv(f)
    if f == 0 or f == nil then return _G end -- simulated behavior
    local f = envhelper(f, 'getfenv') -- FIX: was 'setfenv' (wrong name in error messages)
    local up, val = envlookup(f)
    if not up then return _G end -- simulated behavior [**]
    return val
  end
  -- [**] possible reasons: no _ENV upvalue, C function
end
return M
--[[ FILE rockspec.in
package = 'compat_env'
version = '$(_VERSION)-1'
source = {
url = 'https://raw.github.com/gist/1654007/$(GITID)/compat_env.lua',
--url = 'https://raw.github.com/gist/1654007/compat_env.lua', -- latest raw
--url = 'https://gist.github.com/gists/1654007/download',
md5 = '$(MD5)'
}
description = {
summary = 'Lua 5.1/5.2 environment compatibility functions',
detailed = [=[
Provides Lua 5.1/5.2 environment related compatibility functions.
This includes implementations of Lua 5.2 style `load` and `loadfile`
for use in Lua 5.1. It also includes Lua 5.1 style `getfenv` and `setfenv`
for use in Lua 5.2.
]=],
license = 'MIT/X11',
homepage = 'https://gist.github.com/1654007',
maintainer = 'David Manura'
}
dependencies = {} -- Lua 5.1 or 5.2
build = {
type = 'builtin',
modules = {
['compat_env'] = 'compat_env.lua'
}
}
--]]---------------------------------------------------------------------
--[[ FILE test.lua
-- test.lua - test suite for compat_env module.
local CL = require 'compat_env'
local load = CL.load
local loadfile = CL.loadfile
local setfenv = CL.setfenv
local getfenv = CL.getfenv
local function checkeq(a, b, e)
if a ~= b then error(
'not equal ['..tostring(a)..'] ['..tostring(b)..'] ['..tostring(e)..']')
end
end
local function checkerr(pat, ok, err)
assert(not ok, 'checkerr')
assert(type(err) == 'string' and err:match(pat), err)
end
-- test `load`
checkeq(load('return 2')(), 2)
checkerr('expected near', load'return 2 2')
checkerr('text chunk', load('return 2', nil, 'b'))
checkerr('text chunk', load('', nil, 'b'))
checkerr('binary chunk', load('\027', nil, 't'))
checkeq(load('return 2*x',nil,'bt',{x=5})(), 10)
checkeq(debug.getinfo(load('')).source, '')
checkeq(debug.getinfo(load('', 'foo')).source, 'foo')
-- test `loadfile`
local fh = assert(io.open('tmp.lua', 'wb'))
fh:write('return (...) or x')
fh:close()
checkeq(loadfile('tmp.lua')(2), 2)
checkeq(loadfile('tmp.lua', 't')(2), 2)
checkerr('text chunk', loadfile('tmp.lua', 'b'))
checkeq(loadfile('tmp.lua', nil, {x=3})(), 3)
checkeq(debug.getinfo(loadfile('tmp.lua')).source, '@tmp.lua')
checkeq(debug.getinfo(loadfile('tmp.lua', 't', {})).source, '@tmp.lua')
os.remove'tmp.lua'
-- test `setfenv`/`getfenv`
x = 5
local a,b=true; local function f(c) if a then return x,b,c end end
setfenv(f, {x=3})
checkeq(f(), 3)
checkeq(getfenv(f).x, 3)
checkerr('cannot change', pcall(setfenv, string.len, {})) -- C function
checkeq(getfenv(string.len), _G) -- C function
local function g()
setfenv(1, {x=4})
checkeq(getfenv(1).x, 4)
return x
end
checkeq(g(), 4) -- numeric level
if _G._VERSION ~= 'Lua 5.1' then
checkerr('unsupported', pcall(setfenv, 0, {}))
end
checkeq(getfenv(0), _G)
checkeq(getfenv(), _G) -- no arg
checkeq(x, 5) -- main unaltered
setfenv(function()end, {}) -- no upvalues, ignore
checkeq(getfenv(function()end), _G) -- no upvalues
if _G._VERSION ~= 'Lua 5.1' then
checkeq(getfenv(setfenv(function()end, {})), _G) -- warning: incompatible with 5.1
end
x = nil
print 'OK'
--]]---------------------------------------------------------------------
--[[ FILE CHANGES.txt
0.2.20120124
Renamed module to compat_env (from compat_load)
Add getfenv/setfenv functions
0.1.20120121
Initial public release
--]]

View File

@ -0,0 +1,90 @@
-- Recursive object dumper, for debugging.
-- (c) 2010 David Manura, MIT License.
local M = {}
-- My own object dumper.
-- Intended for debugging, not serialization, with compact formatting.
-- Robust against recursion.
-- Renders Metalua table tag fields specially {tag=X, ...} --> "`X{...}".
-- On first call, only pass parameter o.
-- CATEGORY: AST debug
local ignore_keys_ = {lineinfo=true}
local norecurse_keys_ = {parent=true, ast=true}
-- Render a table key for display: a string that is a bare Lua identifier
-- is emitted as-is; any other key is bracketed and dumped recursively.
local function dumpstring_key_(k, isseen, newindent)
  if type(k) == 'string' and k:match'^[%a_][%w_]*$' then
    return k
  end
  return '[' .. M.dumpstring(k, isseen, newindent) .. ']'
end
-- Comparator for table.sort over mixed-type keys.
-- Order: non-numbers before numbers; numbers ascending among themselves;
-- strings ascending among themselves; any other pairing falls back to
-- tostring() order (arbitrary but deterministic).
local function sort_keys_(a, b)
  local a_num = type(a) == 'number'
  local b_num = type(b) == 'number'
  if a_num and b_num then return a < b end
  if a_num ~= b_num then return b_num end -- exactly one number: numbers last
  if type(a) == 'string' and type(b) == 'string' then return a < b end
  return tostring(a) < tostring(b) -- arbitrary
end
--- Recursively render `o` as a compact debug string.
-- Tables with a string `tag` field render Metalua-style as `tag{...}.
-- `isseen` guards against cycles; `indent` is the current indentation;
-- `key` is the key under which `o` was reached (used to suppress
-- recursion into keys listed in norecurse_keys_).
-- On first call, only pass parameter o.
function M.dumpstring(o, isseen, indent, key)
  isseen = isseen or {}
  indent = indent or ''
  if type(o) == 'table' then
    if isseen[o] or norecurse_keys_[key] then
      -- already rendered (or a no-recurse key): show tag + identity only
      return (type(o.tag) == 'string' and '`' .. o.tag .. ':' or '') .. tostring(o)
    else isseen[o] = true end -- avoid recursion
    local used = {}
    local tag = o.tag
    local s = '{'
    if type(o.tag) == 'string' then
      s = '`' .. tag .. s; used['tag'] = true
    end
    local newindent = indent .. ' '
    local ks = {}; for k in pairs(o) do ks[#ks+1] = k end
    table.sort(ks, sort_keys_)
    --for i,k in ipairs(ks) do print ('keys', k) end
    -- If any numeric entry is a table, put all numeric entries on their own lines.
    local forcenummultiline
    for k in pairs(o) do
      if type(k) == 'number' and type(o[k]) == 'table' then forcenummultiline = true end
    end
    -- inline elements
    for _,k in ipairs(ks) do
      if used[k] then -- skip
      elseif ignore_keys_[k] then used[k] = true
      elseif (type(k) ~= 'number' or not forcenummultiline) and
             type(k) ~= 'table' and (type(o[k]) ~= 'table' or norecurse_keys_[k])
      then
        s = s .. dumpstring_key_(k, isseen, newindent) .. '=' .. M.dumpstring(o[k], isseen, newindent, k) .. ', '
        used[k] = true
      end
    end
    -- elements on separate lines
    local done
    for _,k in ipairs(ks) do
      if not used[k] then
        if not done then s = s .. '\n'; done = true end
        s = s .. newindent .. dumpstring_key_(k, isseen) .. '=' .. M.dumpstring(o[k], isseen, newindent, k) .. ',\n'
      end
    end
    -- strip the trailing comma, then close the brace
    s = s:gsub(',(%s*)$', '%1')
    s = s .. (done and indent or '') .. '}'
    return s
  elseif type(o) == 'string' then
    return string.format('%q', o)
  else
    return tostring(o)
  end
end
return M

View File

@ -0,0 +1,222 @@
-- LuaInspect.globals - identifier scope analysis
-- Locates locals, globals, and their definitions.
--
-- (c) D.Manura, 2008-2010, MIT license.
-- based on http://lua-users.org/wiki/DetectingUndefinedVariables
local M = {}
--! require 'luainspect.typecheck' (context)
local LA = require "luainspect.ast"
-- Bind `name` to its defining AST node `ast` inside `scope`.
-- If the name was already bound, record the masking relationship on both
-- nodes. The conventional name "_" is flagged as intentionally ignored.
local function definelocal(scope, name, ast)
  local masked = scope[name]
  if masked then
    masked.localmasked = true
    ast.localmasking = masked
  end
  scope[name] = ast
  if name == '_' then
    ast.isignore = true
  end
end
-- Resolves scoping and usages of variable in AST.
-- Data Notes:
-- ast.localdefinition refers to lexically scoped definition of `Id node `ast`.
-- If ast.localdefinition == ast then ast is a "lexical definition".
-- If ast.localdefinition == nil, then variable is global.
-- ast.functionlevel is the number of functions the AST is contained in.
-- ast.functionlevel is defined iff ast is a lexical definition.
-- ast.isparam is true iff ast is a lexical definition and a function parameter.
-- ast.isset is true iff ast is a lexical definition and exists an assignment on it.
-- ast.isused is true iff ast is a lexical definition and has been referred to.
-- ast.isignore is true if local variable should be ignored (e.g. typically "_")
-- ast.localmasking - for a lexical definition, this is set to the lexical definition
-- this is masking (i.e. same name). nil if not masking.
-- ast.localmasked - true iff lexical definition masked by another lexical definition.
-- ast.isfield is true iff `String node ast is used for field access on object,
-- e.g. x.y or x['y'].z
-- ast.previous - For `Index{o,s} or `Invoke{o,s,...}, s.previous == o
--- Resolve scoping and usage of variables for one AST node (depth-first).
-- Mutates nodes in place, setting .level, .localdefinition, .functionlevel,
-- .isused/.isset/.isparam and field-access marks (.isfield, .previous).
-- `scope` maps name -> defining Id node; nested scopes chain via __index
-- metatables. `globals` accumulates assigned undeclared names -> {set=ref}.
-- `level` is AST nesting depth; `functionlevel` counts enclosing functions.
local function traverse(ast, scope, globals, level, functionlevel)
  scope = scope or {}
  local blockrecurse
  ast.level = level
  -- operations on walking down the AST
  if ast.tag == 'Local' then
    blockrecurse = 1
    -- note: apply new scope after processing values
  elseif ast.tag == 'Localrec' then
    -- recursive locals: names enter the parent scope before values are
    -- evaluated, so the definitions can refer to themselves
    local namelist_ast, valuelist_ast = ast[1], ast[2]
    for _,value_ast in ipairs(namelist_ast) do
      assert(value_ast.tag == 'Id')
      local name = value_ast[1]
      local parentscope = getmetatable(scope).__index
      definelocal(parentscope, name, value_ast)
      value_ast.localdefinition = value_ast
      value_ast.functionlevel = functionlevel
      value_ast.level = level+1
    end
    blockrecurse = 1
  elseif ast.tag == 'Id' then
    -- variable reference: link it to its lexical definition, if any
    local name = ast[1]
    if scope[name] then
      ast.localdefinition = scope[name]
      ast.functionlevel = functionlevel
      scope[name].isused = true
    else -- global, do nothing
    end
  elseif ast.tag == 'Function' then
    -- parameters are definitions in the function's own scope
    local paramlist_ast, body_ast = ast[1], ast[2]
    functionlevel = functionlevel + 1
    for _,param_ast in ipairs(paramlist_ast) do
      local name = param_ast[1]
      assert(param_ast.tag == 'Id' or param_ast.tag == 'Dots')
      if param_ast.tag == 'Id' then
        definelocal(scope, name, param_ast)
        param_ast.localdefinition = param_ast
        param_ast.functionlevel = functionlevel
        param_ast.isparam = true
      end
      param_ast.level = level+1
    end
    blockrecurse = 1
  elseif ast.tag == 'Set' then
    -- assignment: mark known locals as set; record unknown names as globals
    local reflist_ast, valuelist_ast = ast[1], ast[2]
    for _,ref_ast in ipairs(reflist_ast) do
      if ref_ast.tag == 'Id' then
        local name = ref_ast[1]
        if scope[name] then
          scope[name].isset = true
        else
          if not globals[name] then
            globals[name] = {set=ref_ast}
          end
        end
      end
      ref_ast.level = level+1
    end
    --ENHANCE? We could differentiate assignments to x (which indicates that
    -- x is not const) and assignments to a member of x (which indicates that
    -- x is not a pointer to const) and assignments to any nested member of x
    -- (which indicates that x it not a transitive const).
  elseif ast.tag == 'Fornum' then
    blockrecurse = 1
  elseif ast.tag == 'Forin' then
    blockrecurse = 1
  end
  -- recurse (depth-first search down the AST)
  if ast.tag == 'Repeat' then
    -- repeat-until: the until-condition sees locals declared in the body
    local block_ast, cond_ast = ast[1], ast[2]
    local scope = scope
    for _,stat_ast in ipairs(block_ast) do
      scope = setmetatable({}, {__index = scope})
      traverse(stat_ast, scope, globals, level+1, functionlevel)
    end
    scope = setmetatable({}, {__index = scope})
    traverse(cond_ast, scope, globals, level+1, functionlevel)
  elseif ast.tag == 'Fornum' then
    local name_ast, block_ast = ast[1], ast[#ast]
    -- eval value list in current scope
    for i=2, #ast-1 do traverse(ast[i], scope, globals, level+1, functionlevel) end
    -- eval body in next scope
    local name = name_ast[1]
    definelocal(scope, name, name_ast)
    name_ast.localdefinition = name_ast
    name_ast.functionlevel = functionlevel
    traverse(block_ast, scope, globals, level+1, functionlevel)
  elseif ast.tag == 'Forin' then
    local namelist_ast, vallist_ast, block_ast = ast[1], ast[2], ast[3]
    -- eval value list in current scope
    traverse(vallist_ast, scope, globals, level+1, functionlevel)
    -- eval body in next scope
    for _,name_ast in ipairs(namelist_ast) do
      local name = name_ast[1]
      definelocal(scope, name, name_ast)
      name_ast.localdefinition = name_ast
      name_ast.functionlevel = functionlevel
      name_ast.level = level+1
    end
    traverse(block_ast, scope, globals, level+1, functionlevel)
  else -- normal
    -- generic recursion; skip child index `blockrecurse` (handled above)
    for i,v in ipairs(ast) do
      if i ~= blockrecurse and type(v) == 'table' then
        local scope = setmetatable({}, {__index = scope})
        traverse(v, scope, globals, level+1, functionlevel)
      end
    end
  end
  -- operations on walking up the AST
  if ast.tag == 'Local' then
    -- Unlike Localrec, variables come into scope after evaluating values.
    local namelist_ast, valuelist_ast = ast[1], ast[2]
    for _,name_ast in ipairs(namelist_ast) do
      assert(name_ast.tag == 'Id')
      local name = name_ast[1]
      local parentscope = getmetatable(scope).__index
      definelocal(parentscope, name, name_ast)
      name_ast.localdefinition = name_ast
      name_ast.functionlevel = functionlevel
      name_ast.level = level+1
    end
  elseif ast.tag == 'Index' then
    -- x.y / x['y']: mark the string key as a field access
    if ast[2].tag == 'String' then
      ast[2].isfield = true
      ast[2].previous = ast[1]
    end
  elseif ast.tag == 'Invoke' then
    -- x:y(...): the method name string is a field of x
    assert(ast[2].tag == 'String')
    ast[2].isfield = true
    ast[2].previous = ast[1]
  end
end
--- Analyze `ast` and return the globals it assigns.
-- Result maps global name -> {set = ref_ast}, as collected by traverse.
function M.globals(ast)
  local globals = {}
  -- Root scope: empty, carrying the (empty) metatable traverse expects.
  local root_scope = setmetatable({}, {})
  traverse(ast, root_scope, globals, 1, 1)
  return globals
end
-- Gets locals in scope of statement of block ast. If isafter is true and ast is statement,
-- uses scope just after statement ast.
-- Assumes 'parent' attributes on ast are marked.
-- Returns table mapping name -> AST local definition.
-- Gets locals in scope of statement of block ast. If isafter is true and ast is statement,
-- uses scope just after statement ast.
-- Assumes 'parent' attributes on ast are marked.
-- Returns table mapping name -> AST local definition.
function M.variables_in_scope(ast, isafter)
  local scope = {}
  local cast = ast
  -- Walk up the parent chain; at each level collect names defined by
  -- statements at or before the current position.
  while cast.parent do
    local midx = LA.ast_idx(cast.parent, cast)
    for idx=1,midx do
      local bast = cast.parent[idx]
      -- NOTE(review): `and` binds tighter than `or`, so this parses as
      -- Localrec OR (Local AND (idx < midx or isafter)). Localrec names are
      -- thus collected even at idx == midx -- plausible since recursive
      -- definitions are in scope within their own statement, but confirm.
      if bast.tag == 'Localrec' or bast.tag == 'Local' and (idx < midx or isafter) then
        local names_ast = bast[1]
        for bidx=1,#names_ast do
          local name_ast = names_ast[bidx]
          local name = name_ast[1]
          scope[name] = name_ast
        end
      elseif cast ~= ast and (bast.tag == 'For' or bast.tag == 'Forin' or bast.tag == 'Function') then
        -- loop/function header names are visible inside their own body only
        local names_ast = bast[1]
        for bidx=1,#names_ast do
          local name_ast = names_ast[bidx]
          if name_ast.tag == 'Id' then --Q: or maybe `Dots should be included
            local name = name_ast[1]
            scope[name] = name_ast
          end
        end
      end
    end
    cast = cast.parent
  end
  return scope
end
return M

View File

@ -0,0 +1,433 @@
local M = {}
local T = require "luainspect.types"
-- signatures of known globals
-- signatures of known globals
-- Maps a dotted global name (e.g. 'math.sqrt') to the human-readable
-- signature string shown to users by LuaInspect.
-- fix: corrected typo "libary" -> "library" in the `math` description.
M.global_signatures = {
  assert = "assert (v [, message])",
  collectgarbage = "collectgarbage (opt [, arg])",
  dofile = "dofile (filename)",
  error = "error (message [, level])",
  _G = "(table)",
  getfenv = "getfenv ([f])",
  getmetatable = "getmetatable (object)",
  ipairs = "ipairs (t)",
  load = "load (func [, chunkname])",
  loadfile = "loadfile ([filename])",
  loadstring = "loadstring (string [, chunkname])",
  next = "next (table [, index])",
  pairs = "pairs (t)",
  pcall = "pcall (f, arg1, ...)",
  print = "print (...)",
  rawequal = "rawequal (v1, v2)",
  rawget = "rawget (table, index)",
  rawset = "rawset (table, index, value)",
  select = "select (index, ...)",
  setfenv = "setfenv (f, table)",
  setmetatable = "setmetatable (table, metatable)",
  tonumber = "tonumber (e [, base])",
  tostring = "tostring (e)",
  type = "type (v)",
  unpack = "unpack (list [, i [, j]])",
  _VERSION = "(string)",
  xpcall = "xpcall (f, err)",
  module = "module (name [, ...])",
  require = "require (modname)",
  coroutine = "(table) coroutine manipulation library",
  debug = "(table) debug facilities library",
  io = "(table) I/O library",
  math = "(table) math functions library",
  os = "(table) OS facilities library",
  package = "(table) package library",
  string = "(table) string manipulation library",
  table = "(table) table manipulation library",
  ["coroutine.create"] = "coroutine.create (f)",
  ["coroutine.resume"] = "coroutine.resume (co [, val1, ...])",
  ["coroutine.running"] = "coroutine.running ()",
  ["coroutine.status"] = "coroutine.status (co)",
  ["coroutine.wrap"] = "coroutine.wrap (f)",
  ["coroutine.yield"] = "coroutine.yield (...)",
  ["debug.debug"] = "debug.debug ()",
  ["debug.getfenv"] = "debug.getfenv (o)",
  ["debug.gethook"] = "debug.gethook ([thread])",
  ["debug.getinfo"] = "debug.getinfo ([thread,] function [, what])",
  ["debug.getlocal"] = "debug.getlocal ([thread,] level, local)",
  ["debug.getmetatable"] = "debug.getmetatable (object)",
  ["debug.getregistry"] = "debug.getregistry ()",
  ["debug.getupvalue"] = "debug.getupvalue (func, up)",
  ["debug.setfenv"] = "debug.setfenv (object, table)",
  ["debug.sethook"] = "debug.sethook ([thread,] hook, mask [, count])",
  ["debug.setlocal"] = "debug.setlocal ([thread,] level, local, value)",
  ["debug.setmetatable"] = "debug.setmetatable (object, table)",
  ["debug.setupvalue"] = "debug.setupvalue (func, up, value)",
  ["debug.traceback"] = "debug.traceback ([thread,] [message] [, level])",
  ["io.close"] = "io.close ([file])",
  ["io.flush"] = "io.flush ()",
  ["io.input"] = "io.input ([file])",
  ["io.lines"] = "io.lines ([filename])",
  ["io.open"] = "io.open (filename [, mode])",
  ["io.output"] = "io.output ([file])",
  ["io.popen"] = "io.popen (prog [, mode])",
  ["io.read"] = "io.read (...)",
  ["io.tmpfile"] = "io.tmpfile ()",
  ["io.type"] = "io.type (obj)",
  ["io.write"] = "io.write (...)",
  ["math.abs"] = "math.abs (x)",
  ["math.acos"] = "math.acos (x)",
  ["math.asin"] = "math.asin (x)",
  ["math.atan"] = "math.atan (x)",
  ["math.atan2"] = "math.atan2 (y, x)",
  ["math.ceil"] = "math.ceil (x)",
  ["math.cos"] = "math.cos (x)",
  ["math.cosh"] = "math.cosh (x)",
  ["math.deg"] = "math.deg (x)",
  ["math.exp"] = "math.exp (x)",
  ["math.floor"] = "math.floor (x)",
  ["math.fmod"] = "math.fmod (x, y)",
  ["math.frexp"] = "math.frexp (x)",
  ["math.huge"] = "math.huge",
  ["math.ldexp"] = "math.ldexp (m, e)",
  ["math.log"] = "math.log (x)",
  ["math.log10"] = "math.log10 (x)",
  ["math.max"] = "math.max (x, ...)",
  ["math.min"] = "math.min (x, ...)",
  ["math.modf"] = "math.modf (x)",
  ["math.pi"] = "math.pi",
  ["math.pow"] = "math.pow (x, y)",
  ["math.rad"] = "math.rad (x)",
  ["math.random"] = "math.random ([m [, n]])",
  ["math.randomseed"] = "math.randomseed (x)",
  ["math.sin"] = "math.sin (x)",
  ["math.sinh"] = "math.sinh (x)",
  ["math.sqrt"] = "math.sqrt (x)",
  ["math.tan"] = "math.tan (x)",
  ["math.tanh"] = "math.tanh (x)",
  ["os.clock"] = "os.clock ()",
  ["os.date"] = "os.date ([format [, time]])",
  ["os.difftime"] = "os.difftime (t2, t1)",
  ["os.execute"] = "os.execute ([command])",
  ["os.exit"] = "os.exit ([code])",
  ["os.getenv"] = "os.getenv (varname)",
  ["os.remove"] = "os.remove (filename)",
  ["os.rename"] = "os.rename (oldname, newname)",
  ["os.setlocale"] = "os.setlocale (locale [, category])",
  ["os.time"] = "os.time ([table])",
  ["os.tmpname"] = "os.tmpname ()",
  ["package.cpath"] = "package.cpath",
  ["package.loaded"] = "package.loaded",
  ["package.loaders"] = "package.loaders",
  ["package.loadlib"] = "package.loadlib (libname, funcname)",
  ["package.path"] = "package.path",
  ["package.preload"] = "package.preload",
  ["package.seeall"] = "package.seeall (module)",
  ["string.byte"] = "string.byte (s [, i [, j]])",
  ["string.char"] = "string.char (...)",
  ["string.dump"] = "string.dump (function)",
  ["string.find"] = "string.find (s, pattern [, init [, plain]])",
  ["string.format"] = "string.format (formatstring, ...)",
  ["string.gmatch"] = "string.gmatch (s, pattern)",
  ["string.gsub"] = "string.gsub (s, pattern, repl [, n])",
  ["string.len"] = "string.len (s)",
  ["string.lower"] = "string.lower (s)",
  ["string.match"] = "string.match (s, pattern [, init])",
  ["string.rep"] = "string.rep (s, n)",
  ["string.reverse"] = "string.reverse (s)",
  ["string.sub"] = "string.sub (s, i [, j])",
  ["string.upper"] = "string.upper (s)",
  ["table.concat"] = "table.concat (table [, sep [, i [, j]]])",
  ["table.insert"] = "table.insert (table, [pos,] value)",
  ["table.maxn"] = "table.maxn (table)",
  ["table.remove"] = "table.remove (table [, pos])",
  ["table.sort"] = "table.sort (table [, comp])",
}
-- utility function. Converts e.g. name 'math.sqrt' to its value.
-- Resolve a dotted global name (e.g. 'math.sqrt') to its value,
-- raising on any non-indexable intermediate.
local function resolve_global_helper_(name)
  local value = _G
  for part in name:gmatch'[^%.]+' do
    value = value[part]
  end
  return value
end
-- Protected resolution: returns the value on success,
-- or nil plus an error message on failure.
local function resolve_global(name)
  local ok, result = pcall(resolve_global_helper_, name)
  if not ok then return nil, result end
  return result
end
-- Same as global_signatures but maps value (not name) to signature.
-- Built by resolving each dotted name; only reference-typed values
-- (function/table/userdata/coroutine) can serve as table keys usefully,
-- so scalars (e.g. math.pi, package.path) are skipped.
M.value_signatures = {}
local isobject = {['function']=true, ['table']=true, ['userdata']=true, ['coroutine']=true}
for name,sig in pairs(M.global_signatures) do
  local val, err = resolve_global(name)
  if isobject[type(val)] then
    M.value_signatures[val] = sig
  end
end
-- min,max argument counts.
-- min,max argument counts.
-- Keyed by function value -> {min[, max]}; a missing max means unspecified.
-- `fn or false` guards functions that may be absent in some Lua versions;
-- the final `[false] = nil` entry then discards that placeholder key
-- (relies on potentially undefined behavior, per the original comment).
M.argument_counts = {
  [assert] = {1,2},
  [collectgarbage] = {1,2},
  [dofile] = {1},
  [error] = {1,2},
  [getfenv or false] = {0,1},
  [getmetatable] = {1,1},
  [ipairs] = {1,1},
  [load] = {1,2},
  [loadfile] = {0,1},
  [loadstring] = {1,2},
  [next] = {1,2},
  [pairs] = {1,1},
  [pcall] = {1,math.huge},
  [print] = {0,math.huge},
  [rawequal] = {2,2},
  [rawget] = {2,2},
  [rawset] = {3,3},
  [select] = {1, math.huge},
  [setfenv or false] = {2,2},
  [setmetatable] = {2,2},
  [tonumber] = {1,2},
  [tostring] = {1},
  [type] = {1},
  [unpack] = {1,3},
  [xpcall] = {2,2},
  [module] = {1,math.huge},
  [require] = {1,1},
  [coroutine.create] = {1,1},
  [coroutine.resume] = {1, math.huge},
  [coroutine.running] = {0,0},
  [coroutine.status] = {1,1},
  [coroutine.wrap] = {1,1},
  [coroutine.yield] = {0,math.huge},
  [debug.debug] = {0,0},
  [debug.getfenv or false] = {1,1},
  [debug.gethook] = {0,1},
  [debug.getinfo] = {1,3},
  [debug.getlocal] = {2,3},
  [debug.getmetatable] = {1,1},
  [debug.getregistry] = {0,0},
  [debug.getupvalue] = {2,2},
  [debug.setfenv or false] = {2,2},
  [debug.sethook] = {2,4},
  [debug.setlocal] = {3,4},
  [debug.setmetatable] = {2,2},
  [debug.setupvalue] = {3,3},
  [debug.traceback] = {0,3},
  [io.close] = {0,1},
  [io.flush] = {0,0},
  [io.input] = {0,1},
  [io.lines] = {0,1},
  [io.open] = {1,2},
  [io.output] = {0,1},
  [io.popen] = {1,2},
  [io.read] = {0,math.huge},
  [io.tmpfile] = {0},
  [io.type] = {1},
  [io.write] = {0,math.huge},
  [math.abs] = {1},
  [math.acos] = {1},
  [math.asin] = {1},
  [math.atan] = {1},
  [math.atan2] = {2,2},
  [math.ceil] = {1,1},
  [math.cos] = {1,1},
  [math.cosh] = {1,1},
  [math.deg] = {1,1},
  [math.exp] = {1,1},
  [math.floor] = {1,1},
  [math.fmod] = {2,2},
  [math.frexp] = {1,1},
  [math.ldexp] = {2,2},
  [math.log] = {1,1},
  [math.log10] = {1,1},
  [math.max] = {1,math.huge},
  [math.min] = {1,math.huge},
  [math.modf] = {1,1},
  [math.pow] = {2,2},
  [math.rad] = {1,1},
  [math.random] = {0,2},
  [math.randomseed] = {1,1},
  [math.sin] = {1,1},
  [math.sinh] = {1,1},
  [math.sqrt] = {1,1},
  [math.tan] = {1,1},
  [math.tanh] = {1,1},
  [os.clock] = {0,0},
  [os.date] = {0,2},
  [os.difftime] = {2,2},
  [os.execute] = {0,1},
  [os.exit] = {0,1},
  [os.getenv] = {1,1},
  [os.remove] = {1,1},
  [os.rename] = {2,2},
  [os.setlocale] = {1,2},
  [os.time] = {0,1},
  [os.tmpname] = {0,0},
  [package.loadlib] = {2,2},
  [package.seeall] = {1,1},
  [string.byte] = {1,3},
  [string.char] = {0,math.huge},
  [string.dump] = {1,1},
  [string.find] = {2,4},
  [string.format] = {1,math.huge},
  [string.gmatch] = {2,2},
  [string.gsub] = {3,4},
  [string.len] = {1,1},
  [string.lower] = {1,1},
  [string.match] = {2,3},
  [string.rep] = {2,2},
  [string.reverse] = {1,1},
  [string.sub] = {2,3},
  [string.upper] = {1,1},
  [table.concat] = {1,4},
  [table.insert] = {2,3},
  [table.maxn] = {1,1},
  [table.remove] = {1,2},
  [table.sort] = {1,2},
  [false] = nil -- trick (relies on potentially undefined behavior)
}
-- functions with zero or nearly zero side-effects, and with deterministic results, that may be evaluated by the analyzer.
-- functions with zero or nearly zero side-effects, and with deterministic results, that may be evaluated by the analyzer.
-- Keyed by function value -> true. Commented-out entries were deliberately
-- excluded for having side effects or being non-deterministic.
M.safe_function = {
  [require] = true,
  [rawequal] = true,
  [rawget] = true,
  [require] = true, -- sort of
  [select] = true,
  [tonumber] = true,
  [tostring] = true,
  [type] = true,
  [unpack] = true,
  [coroutine.create] = true,
  -- [coroutine.resume]
  [coroutine.running] = true,
  [coroutine.status] = true,
  [coroutine.wrap] = true,
  --[coroutine.yield]
  -- [debug.debug]
  --[debug.getfenv] = true,
  [debug.gethook] = true,
  [debug.getinfo] = true,
  [debug.getlocal] = true,
  [debug.getmetatable] = true,
  [debug.getregistry] = true,
  [debug.getupvalue] = true,
  -- [debug.setfenv]
  -- [debug.sethook]
  -- [debug.setlocal]
  -- [debug.setmetatable]
  -- [debug.setupvalue]
  -- [debug.traceback] = true,
  [io.type] = true,
  -- skip all other io.*
  [math.abs] = true,
  [math.acos] = true,
  [math.asin] = true,
  [math.atan] = true,
  [math.atan2] = true,
  [math.ceil] = true,
  [math.cos] = true,
  [math.cosh] = true,
  [math.deg] = true,
  [math.exp] = true,
  [math.floor] = true,
  [math.fmod] = true,
  [math.frexp] = true,
  [math.ldexp] = true,
  [math.log] = true,
  [math.log10] = true,
  [math.max] = true,
  [math.min] = true,
  [math.modf] = true,
  [math.pow] = true,
  [math.rad] = true,
  --[math.random]
  --[math.randomseed]
  [math.sin] = true,
  [math.sinh] = true,
  [math.sqrt] = true,
  [math.tan] = true,
  [math.tanh] = true,
  [os.clock] = true, -- safe but non-deterministic
  [os.date] = true,-- safe but non-deterministic
  [os.difftime] = true,
  --[os.execute]
  --[os.exit]
  [os.getenv] = true, -- though depends on environment
  --[os.remove]
  --[os.rename]
  --[os.setlocale]
  [os.time] = true, -- safe but non-deterministic
  --[os.tmpname]
  [string.byte] = true,
  [string.char] = true,
  [string.dump] = true,
  [string.find] = true,
  [string.format] = true,
  [string.gmatch] = true,
  [string.gsub] = true,
  [string.len] = true,
  [string.lower] = true,
  [string.match] = true,
  [string.rep] = true,
  [string.reverse] = true,
  [string.sub] = true,
  [string.upper] = true,
  [table.maxn] = true,
}
M.mock_functions = {}
-- TODO:IMPROVE
-- Register a mock signature for `func` in M.mock_functions.
-- Varargs are input specs, then '->', then output specs. A table spec is
-- reduced to its first element; 'N' and 'I' both map to T.number.
-- Each spec list carries an `n` count alongside its array part.
local function mockfunction(func, ...)
  local inputs, outputs = {n=0}, {n=0}
  local target = inputs
  for i = 1, select('#', ...) do
    local v = select(i, ...)
    if type(v) == 'table' then v = v[1] end
    if v == 'N' or v == 'I' then v = T.number end
    if v == '->' then
      target = outputs -- subsequent specs describe return values
    else
      target[target.n + 1] = v
      target.n = target.n + 1
    end
  end
  M.mock_functions[func] = {inputs = inputs, outputs = outputs}
end
-- Mock signatures for the math library. Specs: 'N' = number, 'I' = integer,
-- {'N',lo,hi} = number restricted to a range; '->' separates inputs from
-- outputs.
mockfunction(math.abs, 'N', '->', {'N',0,math.huge})
mockfunction(math.acos, {'N',-1,1}, '->', {'N',0,math.pi/2})
mockfunction(math.asin, {'N',-1,1}, '->', {'N',-math.pi/2,math.pi/2})
mockfunction(math.atan, {'N',-math.huge,math.huge}, '->',
             {'N',-math.pi/2,math.pi/2})
--FIX atan2
mockfunction(math.ceil, 'N','->','I')
mockfunction(math.cos, 'N','->',{'N',-1,1})
mockfunction(math.cosh, 'N','->',{'N',1,math.huge})
mockfunction(math.deg, 'N','->','N')
mockfunction(math.exp, 'N','->',{'N',0,math.huge})
mockfunction(math.floor, 'N','->','I')
mockfunction(math.fmod, 'N','N','->','N')
mockfunction(math.frexp, 'N','->',{'N',-1,1},'->','I')
-- NOTE(review): {'N','I'} collapses to just 'N' (tables reduce to their
-- first element in mockfunction); possibly 'N','I' as two separate args
-- was intended -- confirm.
mockfunction(math.ldexp, {'N','I'},'->','N')
mockfunction(math.log, {'N',0,math.huge},'->','N')
mockfunction(math.log10, {'N',0,math.huge},'->','N')
-- function max(...) print 'NOT IMPL'end
-- function min(...) print 'NOT IMPL'end
mockfunction(math.modf, 'N','->','I',{'N',-1,1})
mockfunction(math.pow, 'N','N','->','N') -- improve?
mockfunction(math.rad, 'N','->','N')
-- random = function() print 'NOT IMPL' end
mockfunction(math.randomseed, 'N')
mockfunction(math.sin, 'N','->',{'N',-1,1})
mockfunction(math.sinh, 'N','->','N')
mockfunction(math.sqrt, {'N',0,math.huge},'->',{'N',0,math.huge})
mockfunction(math.tan, 'N','->','N') -- improve?
mockfunction(math.tanh, 'N','->',{'N',-1,1})
return M

View File

@ -0,0 +1,40 @@
-- luainspect.typecheck - Type definitions used to check LuaInspect itself.
--
-- (c) 2010 David Manura, MIT License.
local T = require "luainspect.types"
-- All AST-typed values print simply as 'AST' to keep diagnostics readable.
local ast_mt = {__tostring = function(s) return 'AST' end}
--- Returns a function that registers LuaInspect's own value types with the
-- given analysis `context` (via context.apply_value, matching variable
-- names by pattern).
return function(context)
  -- AST type.
  local ast = T.table {
    tag = T.string,
    lineinfo=T.table{first=T.table{comments=T.table{T.table{T.string,T.number,T.number}},T.number,T.number,T.number,T.string},
    ast=T.table{comments=T.table{T.table{T.string,T.number,T.number}},T.number,T.number,T.number,T.string}},
    isfield=T.boolean, tag2=T.string,
    value=T.universal, valueself=T.number, valuelist=T.table{n=T.number, isvaluepegged=T.boolean},
    resolvedname=T.string, definedglobal=T.boolean, id=T.number, isparam=T.boolean, isset=T.boolean, isused=T.boolean,
    isignore=T.boolean,
    functionlevel=T.number, localmasked=T.boolean, note=T.string, nocollect=T.table{}, isdead=T.boolean}
  -- FIX: some of these are "boolean or nil" actually
  -- Self-referential fields: these attributes hold other AST nodes.
  ast.localdefinition=ast; ast.localmasking = ast
  ast.previous = ast; ast.parent = ast
  ast.seevalue = ast; ast.seenote=ast
  setmetatable(ast, ast_mt)
  ast[1] = ast; ast[2] = ast
  context.apply_value('ast$', ast)
  -- Token type.
  context.apply_value('token$', T.table{
    tag=T.string, fpos=T.number, lpos=T.number, keywordid=T.number, ast=ast, [1]=T.string
  })
  -- Lua source code string type.
  context.apply_value('src$', '')
  -- SciTE syler object type.
  local nf = function()end
  context.apply_value('^styler$', T.table{SetState=nf, More=nf, Current=nf, Forward=nf, StartStyling=nf, EndStyling=nf, language=T.string})
end

View File

@ -0,0 +1,130 @@
local T = {} -- types
-- Each type is represented by a unique table acting as a sentinel value;
-- membership registries below classify those sentinels.
-- istype[o] iff o represents a type (i.e. set of values)
T.istype = {}
-- iserror[o] iff o represents an error type (created via T.error).
T.iserror = {}
-- istabletype[o] iff o represents a table type (created by T.table).
T.istabletype = {}
-- Number type
T.number = {}
setmetatable(T.number, T.number)
function T.number.__tostring(self)
  return 'number'
end
T.istype[T.number] = true
-- String type
T.string = {}
setmetatable(T.string, T.string)
function T.string.__tostring(self)
  return 'string'
end
T.istype[T.string] = true
-- Boolean type
T.boolean = {}
setmetatable(T.boolean, T.boolean)
function T.boolean.__tostring(self)
  return 'boolean'
end
T.istype[T.boolean] = true
-- Table type: registers an existing table `t` as a table type and
-- returns it unchanged.
function T.table(t)
  T.istype[t] = true
  T.istabletype[t] = true
  return t
end
-- Universal type. This is a superset of all other types.
T.universal = {}
setmetatable(T.universal, T.universal)
function T.universal.__tostring(self)
  return 'unknown'
end
T.istype[T.universal] = true
-- nil type. Represents `nil` but can be stored in tables.
T['nil'] = {}
setmetatable(T['nil'], T['nil'])
T['nil'].__tostring = function(self)
  return 'nil'
end
T.istype[T['nil']] = true
-- None type. Represents a non-existent value, in a similar way
-- that `none` is used differently from `nil` in the Lua C API.
T.none = {}
setmetatable(T.none, T.none)
function T.none.__tostring(self)
  return 'none'
end
T.istype[T.none] = true
-- Error type: wraps an error value; each call creates a distinct error type.
local CError = {}; CError.__index = CError
function CError.__tostring(self) return "error:" .. tostring(self.value) end
function T.error(val)
  local self = setmetatable({value=val}, CError)
  T.istype[self] = true
  T.iserror[self] = true
  return self
end
-- Gets a type that is a superset of the two given types.
-- Gets a type that is a superset of the two given types.
-- Error types dominate; identical values collapse to themselves; concrete
-- values widen to their primitive type (string/number/boolean) when both
-- sides agree; anything else widens to T.universal.
function T.superset_types(a, b)
  if T.iserror[a] then return a end
  if T.iserror[b] then return b end
  if rawequal(a, b) then return a end -- note: includes nil == nil
  for _, kind in ipairs{'string', 'number', 'boolean'} do
    local tobj = T[kind]
    if type(a) == kind or a == tobj then
      if type(b) == kind or b == tobj then
        return tobj
      end
      return T.universal
    end
  end
  return T.universal -- IMPROVE
end
--[[TESTS:
assert(T.superset_types(2, 2) == 2)
assert(T.superset_types(2, 3) == T.number)
assert(T.superset_types(2, T.number) == T.number)
assert(T.superset_types(T.number, T.string) == T.universal)
print 'DONE'
--]]
-- Determines whether type `o` certainly evaluates to true (true),
-- certainly evaluates to false (false) or could evaluate to either
-- true of false ('?').
-- Determines whether type `o` certainly evaluates to true (true),
-- certainly evaluates to false (false), or could go either way ('?').
function T.boolean_cast(o)
  if T.iserror[o] then
    return '?' -- error types are indeterminate (special case)
  end
  if o == nil or o == false or o == T['nil'] then
    return false -- all subsets of {nil, false}
  end
  if o == T.universal or o == T.boolean then
    return '?' -- all supersets of boolean
  end
  return true -- all subsets of universal - {nil, false}
end
return T

View File

@ -0,0 +1,162 @@
---------------------------------------------------------------------------
-- Copyright (c) 2006-2013 Fabien Fleutot and others.
--
-- All rights reserved.
--
-- This program and the accompanying materials are made available
-- under the terms of the Eclipse Public License v1.0 which
-- accompanies this distribution, and is available at
-- http://www.eclipse.org/legal/epl-v10.html
--
-- This program and the accompanying materials are also made available
-- under the terms of the MIT public license which accompanies this
-- distribution, and is available at http://www.lua.org/license.html
--
-- Contributors:
-- Fabien Fleutot - API and implementation
--
--------------------------------------------------------------------------------
--------------------------------------------------------------------------------
--
-- Convert between various code representation formats. Atomic
-- converters are written in extenso, others are composed automatically
-- by chaining the atomic ones together in a closure.
--
-- Supported formats are:
--
-- * srcfile: the name of a file containing sources.
-- * src: these sources as a single string.
-- * lexstream: a stream of lexemes.
-- * ast: an abstract syntax tree.
-- * proto:     a (Yueliang) structure containing a high level
-- representation of bytecode. Largely based on the
-- Proto structure in Lua's VM
-- * bytecode: a string dump of the function, as taken by
-- loadstring() and produced by string.dump().
-- * function: an executable lua function in RAM.
--
--------------------------------------------------------------------------------
require 'checks'
local M = { }
--------------------------------------------------------------------------------
-- Order of the transformations. if 'a' is on the left of 'b', then a 'a' can
-- be transformed into a 'b' (but not the other way around).
-- M.sequence goes for numbers to format names, M.order goes from format
-- names to numbers.
--------------------------------------------------------------------------------
M.sequence = {
  'srcfile', 'src', 'lexstream', 'ast', 'proto', 'bytecode', 'function' }
-- `checks` type specs for the (input, name) parameters of each converter,
-- keyed by source format ('function' has no converter out of it).
local arg_types = {
  srcfile    = { 'string', '?string' },
  src        = { 'string', '?string' },
  lexstream  = { 'lexer.stream', '?string' },
  ast        = { 'table', '?string' },
  proto      = { 'table', '?string' },
  bytecode   = { 'string', '?string' },
}
M.order= { }; for a,b in pairs(M.sequence) do M.order[b]=a end
local CONV = { } -- conversion metatable __index
-- Read the contents of source file `x` and return them as a string.
-- @param x    path of the file to read
-- @param name optional chunk name; defaults to '@'..x (the standard Lua
--             convention for "chunk loaded from a file")
-- @return source string, chunk name
-- Raises an error if the file cannot be opened or read.
-- Fix: the file handle is now closed before raising a read error
-- (the original leaked the descriptor when the read failed).
function CONV :srcfile_to_src(x, name)
    checks('metalua.compiler', 'string', '?string')
    name = name or '@'..x
    local f, open_err = io.open (x, 'rb')
    if not f then error(open_err) end
    local r, read_err = f :read '*a'
    f :close()
    if not r then error("Cannot read file '"..x.."': "..tostring(read_err)) end
    return r, name
end
-- Turn a source string into a lexeme stream, using this compiler
-- instance's own lexer.
function CONV :src_to_lexstream(src, name)
    checks('metalua.compiler', 'string', '?string')
    local stream = self.parser.lexer :newstream (src, name)
    return stream, name
end
-- Parse a lexeme stream into an abstract syntax tree, recording the
-- chunk name in the tree's `source` field.
function CONV :lexstream_to_ast(lx, name)
    checks('metalua.compiler', 'lexer.stream', '?string')
    local tree = self.parser.chunk(lx)
    tree.source = name
    return tree, name
end
local bytecode_compiler = nil -- memoized result of the pcall'ed require below
-- Lazily load the optional bytecode back-end, caching it after the
-- first successful require. Raises a friendly message when running on
-- a Metalua distribution that does not ship the bytecode compiler, and
-- re-raises any other loading error unchanged.
local function get_bytecode_compiler()
    if not bytecode_compiler then
        local ok, mod_or_err = pcall(require, 'metalua.compiler.bytecode')
        if ok then
            bytecode_compiler = mod_or_err
        elseif string.match(mod_or_err, "not found") then
            error "Compilation only available with full Metalua"
        else
            error (mod_or_err)
        end
    end
    return bytecode_compiler
end
-- Compile an AST into a (Yueliang) function-prototype structure.
function CONV :ast_to_proto(ast, name)
    checks('metalua.compiler', 'table', '?string')
    local proto = get_bytecode_compiler().ast_to_proto(ast, name)
    return proto, name
end
-- Serialize a function prototype into a bytecode string.
-- Fix: unlike every sibling converter, this one omitted the `checks()`
-- argument validation; it is added here for consistency (signature per
-- arg_types.proto = { 'table', '?string' }).
function CONV :proto_to_bytecode(proto, name)
    checks('metalua.compiler', 'table', '?string')
    return get_bytecode_compiler().proto_to_bytecode(proto), name
end
-- Load a bytecode (or source) string into an executable function.
-- NOTE(review): loadstring() returns nil plus an error message on
-- failure, and a single function on success — so unlike the other
-- converters, the second return value here is never the chunk name.
function CONV :bytecode_to_function(bc, name)
checks('metalua.compiler', 'string', '?string')
return loadstring(bc, name)
end
-- Create all sensible combinations
-- Compose multi-step converters: for every ordered pair (src, dst) with
-- at least one intermediate format between them, chain the hand-written
-- atomic converters into a generated CONV.<src>_to_<dst>.
-- j starts at i+2 because adjacent pairs (j == i+1) are the atomic
-- converters already defined above.
for i=1,#M.sequence do
local src = M.sequence[i]
for j=i+2, #M.sequence do
local dst = M.sequence[j]
local dst_name = src.."_to_"..dst
local my_arg_types = arg_types[src]
local functions = { }
for k=i, j-1 do
-- Every step of the chain must already exist as an atomic converter.
local name = M.sequence[k].."_to_"..M.sequence[k+1]
local f = assert(CONV[name], name)
table.insert (functions, f)
end
-- The composed converter threads the (value, name) pair through each
-- intermediate conversion step in order.
CONV[dst_name] = function(self, a, b)
checks('metalua.compiler', unpack(my_arg_types))
for _, f in ipairs(functions) do
a, b = f(self, a, b)
end
return a, b
end
--printf("Created M.%s out of %s", dst_name, table.concat(n, ', '))
end
end
--------------------------------------------------------------------------------
-- This one goes in the "wrong" direction, cannot be composed.
--------------------------------------------------------------------------------
-- Reverse conversion: dump a live function back into a bytecode string
-- (varargs are forwarded untouched to string.dump).
function CONV :function_to_bytecode(...) return string.dump(...) end
-- Reverse conversion: pretty-print an AST back into Lua source text.
function CONV :ast_to_src(...)
require 'metalua.loader' -- ast_to_string isn't written in plain lua
return require 'metalua.compiler.ast_to_src' (...)
end
-- Compiler instances: a metatable exposing every converter through
-- __index, plus a constructor that binds a fresh parser to each one.
local MT = { __index=CONV, __type='metalua.compiler' }

function M.new()
    local instance = {
        parser = require 'metalua.compiler.parser' .new()
    }
    return setmetatable(instance, MT)
end

return M

View File

@ -0,0 +1,42 @@
--------------------------------------------------------------------------------
-- Copyright (c) 2006-2013 Fabien Fleutot and others.
--
-- All rights reserved.
--
-- This program and the accompanying materials are made available
-- under the terms of the Eclipse Public License v1.0 which
-- accompanies this distribution, and is available at
-- http://www.eclipse.org/legal/epl-v10.html
--
-- This program and the accompanying materials are also made available
-- under the terms of the MIT public license which accompanies this
-- distribution, and is available at http://www.lua.org/license.html
--
-- Contributors:
-- Fabien Fleutot - API and implementation
--
--------------------------------------------------------------------------------
-- Export all public APIs from sub-modules, squashed into a flat spacename
-- Build a fresh parser: a Lua lexer plus every sub-grammar module,
-- each applied exactly once and remembered in `extensions`.
local MT = { __type='metalua.compiler.parser' }

local MODULE_REL_NAMES = { "annot.grammar", "expr", "meta", "misc",
                           "stat", "table", "ext" }

local function new()
    local parser = {
        lexer      = require "metalua.compiler.parser.lexer" (),
        extensions = { },
    }
    for _, suffix in ipairs(MODULE_REL_NAMES) do
        local mod_name = "metalua.compiler.parser."..suffix
        if not parser.extensions[mod_name] then
            local extender = require (mod_name)
            -- A module may be either an extender function or plain data.
            if type (extender) == 'function' then extender(parser) end
            parser.extensions[mod_name] = extender
        end
    end
    return setmetatable(parser, MT)
end

return { new = new }

View File

@ -0,0 +1,48 @@
--------------------------------------------------------------------------------
-- Copyright (c) 2006-2013 Fabien Fleutot and others.
--
-- All rights reserved.
--
-- This program and the accompanying materials are made available
-- under the terms of the Eclipse Public License v1.0 which
-- accompanies this distribution, and is available at
-- http://www.eclipse.org/legal/epl-v10.html
--
-- This program and the accompanying materials are also made available
-- under the terms of the MIT public license which accompanies this
-- distribution, and is available at http://www.lua.org/license.html
--
-- Contributors:
-- Fabien Fleutot - API and implementation
--
--------------------------------------------------------------------------------
require 'checks'
local gg = require 'metalua.grammar.generator'
local M = { }
-- Wrap grammar `primary` so it accepts an optional "# <annotation>"
-- suffix of kind `a_type`; when an annotation is present the parsed
-- value is wrapped in an `Annot node, otherwise it is returned as-is.
function M.opt(mlc, primary, a_type)
    checks('table', 'table|function', 'string')
    local annot_parser = gg.onkeyword{ "#", function()
        return assert(mlc.annot[a_type])
    end }
    local function build(x)
        local value, annotation = unpack(x)
        if annotation then return { tag='Annot', value, annotation } end
        return value
    end
    return gg.sequence{ primary, annot_parser, builder = build }
end
-- split a list of "foo" and "`Annot{foo, annot}" into a list of "foo"
-- and a list of "annot".
-- No annot list is returned if none of the elements were annotated.
-- Split a mixed list of plain items and `Annot{item, annot} pairs into
-- a list of items and a (sparse) list of annotations. When nothing in
-- the list is annotated, the original list is returned unchanged and
-- no annotation list is produced.
function M.split(lst)
    local values, annots = { }, { }
    local found_annot = false
    for i, item in ipairs(lst) do
        if item.tag == 'Annot' then
            found_annot = true
            values[i], annots[i] = item[1], item[2]
        else
            values[i] = item
        end
    end
    if found_annot then return values, annots end
    return lst
end
return M

View File

@ -0,0 +1,112 @@
--------------------------------------------------------------------------------
-- Copyright (c) 2006-2013 Fabien Fleutot and others.
--
-- All rights reserved.
--
-- This program and the accompanying materials are made available
-- under the terms of the Eclipse Public License v1.0 which
-- accompanies this distribution, and is available at
-- http://www.eclipse.org/legal/epl-v10.html
--
-- This program and the accompanying materials are also made available
-- under the terms of the MIT public license which accompanies this
-- distribution, and is available at http://www.lua.org/license.html
--
-- Contributors:
-- Fabien Fleutot - API and implementation
--
--------------------------------------------------------------------------------
local gg = require 'metalua.grammar.generator'
-- Install the annotation sub-grammar on parser `M`: type identifiers
-- (tid), field annotations (tf), type expressions (te/tebar) and
-- annotation statements (ts). Returns the grammar table `M.annot`.
return function(M)
local _M = gg.future(M)
M.lexer :add '->'
local A = { }
local _A = gg.future(A)
M.annot = A
-- Type identifier: Lua keywords such as `"nil"` allowed.
function M.annot.tid(lx)
local w = lx :next()
local t = w.tag
if t=='Keyword' and w[1] :match '^[%a_][%w_]*$' or w.tag=='Id'
then return {tag='TId'; lineinfo=w.lineinfo; w[1]}
else return gg.parse_error (lx, 'tid expected') end
end
-- Maps source-level field keywords to their AST tags.
local field_types = { var='TVar'; const='TConst';
currently='TCurrently'; field='TField' }
-- TODO check lineinfo
-- Field annotation: one of the keywords above, optionally followed by
-- a type expression (except for the bare 'field' kind).
function M.annot.tf(lx)
local tk = lx:next()
local w = tk[1]
local tag = field_types[w]
-- NOTE(review): raises a plain error() here, unlike tid() which uses
-- gg.parse_error() — consider unifying; confirm intended.
if not tag then error ('Invalid field type '..w)
elseif tag=='TField' then return {tag='TField'} else
-- NOTE(review): the type-expression parser is registered below as
-- M.annot.te, but M.te is called here; presumably this should be
-- M.annot.te(lx) — verify against upstream before changing.
local te = M.te(lx)
return {tag=tag; te}
end
end
M.annot.tebar_content = gg.list{
name = 'tebar content',
primary = _A.te,
separators = { ",", ";" },
terminators = ")" }
M.annot.tebar = gg.multisequence{
name = 'annot.tebar',
--{ '*', builder = 'TDynbar' }, -- maybe not user-available
{ '(', _A.tebar_content, ')',
builder = function(x) return x[1] end },
{ _A.te }
}
-- Type expressions: identifiers, '*' (dynamic), table types "[ ... ]"
-- and function types "(...) -> (...)".
M.annot.te = gg.multisequence{
name = 'annot.te',
{ _A.tid, builder=function(x) return x[1] end },
{ '*', builder = 'TDyn' },
{ "[",
gg.list{
primary = gg.sequence{
_M.expr, "=", _A.tf,
builder = 'TPair'
},
separators = { ",", ";" },
terminators = { "]", "|" } },
gg.onkeyword{ "|", _A.tf },
"]",
builder = function(x)
local fields, other = unpack(x)
return { tag='TTable', other or {tag='TField'}, fields }
end }, -- "[ ... ]"
{ '(', _A.tebar_content, ')', '->', '(', _A.tebar_content, ')',
builder = function(x)
local p, r = unpack(x)
return {tag='TFunction', p, r }
end } }
-- Statement-level annotations: '#return ...' and the 'pass' statement.
M.annot.ts = gg.multisequence{
name = 'annot.ts',
{ 'return', _A.tebar_content, builder='TReturn' },
{ _A.tid, builder = function(x)
if x[1][1]=='pass' then return {tag='TPass'}
else error "Bad statement type" end
end } }
-- TODO: add parsers for statements:
-- #return tebar
-- #alias = te
-- #ell = tf
--[[
M.annot.stat_annot = gg.sequence{
gg.list{ primary=_A.tid, separators='.' },
'=',
XXX??,
builder = 'Annot' }
--]]
return M.annot
end

View File

@ -0,0 +1,27 @@
--------------------------------------------------------------------------------
-- Copyright (c) 2006-2013 Fabien Fleutot and others.
--
-- All rights reserved.
--
-- This program and the accompanying materials are made available
-- under the terms of the Eclipse Public License v1.0 which
-- accompanies this distribution, and is available at
-- http://www.eclipse.org/legal/epl-v10.html
--
-- This program and the accompanying materials are also made available
-- under the terms of the MIT public license which accompanies this
-- distribution, and is available at http://www.lua.org/license.html
--
-- Contributors:
-- Fabien Fleutot - API and implementation
--
--------------------------------------------------------------------------------
-- Shared common parser table. It will be filled by parser.init(),
-- and every other module will be able to call its elements at runtime.
--
-- If the table was directly created in parser.init, a circular
-- dependency would be created: parser.init depends on other modules to fill the table,
-- so other modules can't simultaneously depend on it.
return { }

View File

@ -0,0 +1,213 @@
-------------------------------------------------------------------------------
-- Copyright (c) 2006-2013 Fabien Fleutot and others.
--
-- All rights reserved.
--
-- This program and the accompanying materials are made available
-- under the terms of the Eclipse Public License v1.0 which
-- accompanies this distribution, and is available at
-- http://www.eclipse.org/legal/epl-v10.html
--
-- This program and the accompanying materials are also made available
-- under the terms of the MIT public license which accompanies this
-- distribution, and is available at http://www.lua.org/license.html
--
-- Contributors:
-- Fabien Fleutot - API and implementation
--
-------------------------------------------------------------------------------
-------------------------------------------------------------------------------
--
-- Exported API:
-- * [mlp.expr()]
-- * [mlp.expr_list()]
-- * [mlp.func_val()]
--
-------------------------------------------------------------------------------
local pp = require 'metalua.pprint'
local gg = require 'metalua.grammar.generator'
local annot = require 'metalua.compiler.parser.annot.generator'
-- Install the expression grammar on parser `M`: primary expressions,
-- infix/prefix/suffix operator tables with precedences, and function
-- literals. Returns `M`.
-- Fixes applied in review:
--   * prefix "~" (Lua 5.3 bitwise-not) used the binary builder
--     opf2 "bnot", which built a malformed `Op node dropping the
--     operand; it now uses the unary builder opf1, consistent with
--     "not", "#" and unary "-".
--   * removed the unused local eta-expansion `func_val` (the grammar
--     already references the live parser through _M.func_val).
return function(M)
local _M = gg.future(M)
local _table = gg.future(M, 'table')
local _meta = gg.future(M, 'meta') -- TODO move to ext?
local _annot = gg.future(M, 'annot') -- TODO move to annot
--------------------------------------------------------------------------------
-- Non-empty expression list. Actually, this isn't used here, but that's
-- handy to give to users.
--------------------------------------------------------------------------------
M.expr_list = gg.list{ primary=_M.expr, separators="," }
--------------------------------------------------------------------------------
-- Helpers for function applications / method applications
--------------------------------------------------------------------------------
M.func_args_content = gg.list{
name = "function arguments",
primary = _M.expr,
separators = ",",
terminators = ")" }
-- Used to parse methods
M.method_args = gg.multisequence{
name = "function argument(s)",
{ "{", _table.content, "}" },
{ "(", _M.func_args_content, ")", builder = unpack },
{ "+{", _meta.quote_content, "}" },
-- TODO lineinfo?
function(lx) local r = M.opt_string(lx); return r and {r} or { } end }
--------------------------------------------------------------------------------
-- [func_val] parses a function, from opening parameters parenthesis to
-- "end" keyword included. Used for anonymous functions as well as
-- function declaration statements (both local and global).
--------------------------------------------------------------------------------
M.func_params_content = gg.list{
name="function parameters",
gg.multisequence{ { "...", builder = "Dots" }, annot.opt(M, _M.id, 'te') },
separators = ",", terminators = {")", "|"} }
-- TODO move to annot
-- Function body: parameters (possibly annotated), block, "end".
-- When any parameter is annotated, the annotations are collected into
-- a third child of the `Function node (false for unannotated slots).
M.func_val = gg.sequence{
name = "function body",
"(", _M.func_params_content, ")", _M.block, "end",
builder = function(x)
local params, body = unpack(x)
local annots, some = { }, false
for i, p in ipairs(params) do
if p.tag=='Annot' then
params[i], annots[i], some = p[1], p[2], true
else annots[i] = false end
end
if some then return { tag='Function', params, body, annots }
else return { tag='Function', params, body } end
end }
--------------------------------------------------------------------------------
-- Default parser for primary expressions
--------------------------------------------------------------------------------
function M.id_or_literal (lx)
local a = lx:next()
if a.tag~="Id" and a.tag~="String" and a.tag~="Number" then
local msg
if a.tag=='Eof' then
msg = "End of file reached when an expression was expected"
elseif a.tag=='Keyword' then
msg = "An expression was expected, and `"..a[1]..
"' can't start an expression"
else
msg = "Unexpected expr token " .. pp.tostring (a)
end
gg.parse_error (lx, msg)
end
return a
end
--------------------------------------------------------------------------------
-- Builder generator for operators. Wouldn't be worth it if "|x|" notation
-- were allowed, but then lua 5.1 wouldn't compile it
--------------------------------------------------------------------------------
-- opf1 = |op| |_,a| `Op{ op, a } -- builder for unary operators
local function opf1 (op) return
function (_,a) return { tag="Op", op, a } end end
-- opf2 = |op| |a,_,b| `Op{ op, a, b } -- builder for binary operators
local function opf2 (op) return
function (a,_,b) return { tag="Op", op, a, b } end end
-- opf2r = |op| |a,_,b| `Op{ op, b, a } -- (args reversed, for ">" / ">=")
local function opf2r (op) return
function (a,_,b) return { tag="Op", op, b, a } end end
local function op_ne(a, _, b)
-- This version allows to remove the "ne" operator from the AST definition.
-- However, it doesn't always produce the exact same bytecode as Lua 5.1.
return { tag="Op", "not",
{ tag="Op", "eq", a, b, lineinfo= {
first = a.lineinfo.first, last = b.lineinfo.last } } }
end
--------------------------------------------------------------------------------
--
-- complete expression
--
--------------------------------------------------------------------------------
-- FIXME: set line number. In [expr] transformers probably
M.expr = gg.expr {
name = "expression",
primary = gg.multisequence{
name = "expr primary",
{ "(", _M.expr, ")", builder = "Paren" },
{ "function", _M.func_val, builder = unpack },
{ "-{", _meta.splice_content, "}", builder = unpack },
{ "+{", _meta.quote_content, "}", builder = unpack },
{ "nil", builder = "Nil" },
{ "true", builder = "True" },
{ "false", builder = "False" },
{ "...", builder = "Dots" },
{ "{", _table.content, "}", builder = unpack },
_M.id_or_literal },
infix = {
name = "expr infix op",
{ "+", prec = 60, builder = opf2 "add" },
{ "-", prec = 60, builder = opf2 "sub" },
{ "*", prec = 70, builder = opf2 "mul" },
{ "/", prec = 70, builder = opf2 "div" },
{ "%", prec = 70, builder = opf2 "mod" },
{ "^", prec = 90, builder = opf2 "pow", assoc = "right" },
{ "//", prec = 70, builder = opf2 "idiv" },
{ "&", prec = 36, builder = opf2 "band" },
{ "|", prec = 32, builder = opf2 "bor" },
{ "~", prec = 34, builder = opf2 "bxor" },
{ "<<", prec = 38, builder = opf2 "shl" },
{ ">>", prec = 38, builder = opf2 "shr" },
{ "..", prec = 40, builder = opf2 "concat", assoc = "right" },
{ "==", prec = 30, builder = opf2 "eq" },
{ "~=", prec = 30, builder = op_ne },
{ "<", prec = 30, builder = opf2 "lt" },
{ "<=", prec = 30, builder = opf2 "le" },
{ ">", prec = 30, builder = opf2r "lt" },
{ ">=", prec = 30, builder = opf2r "le" },
{ "and",prec = 20, builder = opf2 "and" },
{ "or", prec = 10, builder = opf2 "or" } },
prefix = {
name = "expr prefix op",
{ "not", prec = 80, builder = opf1 "not" },
{ "#", prec = 80, builder = opf1 "len" },
-- unary bitwise-not (Lua 5.3): needs the unary builder opf1; the
-- previous opf2 built a corrupt node with the operand dropped.
{ "~", prec = 80, builder = opf1 "bnot" },
{ "-", prec = 80, builder = opf1 "unm" } },
suffix = {
name = "expr suffix op",
{ "[", _M.expr, "]", builder = function (tab, idx)
return {tag="Index", tab, idx[1]} end},
{ ".", _M.id, builder = function (tab, field)
return {tag="Index", tab, _M.id2string(field[1])} end },
{ "(", _M.func_args_content, ")", builder = function(f, args)
return {tag="Call", f, unpack(args[1])} end },
{ "{", _table.content, "}", builder = function (f, arg)
return {tag="Call", f, arg[1]} end},
{ ":", _M.id, _M.method_args, builder = function (obj, post)
local m_name, args = unpack(post)
return {tag="Invoke", obj, _M.id2string(m_name), unpack(args)} end},
{ "+{", _meta.quote_content, "}", builder = function (f, arg)
return {tag="Call", f, arg[1] } end },
default = { name="opt_string_arg", parse = _M.opt_string, builder = function(f, arg)
return {tag="Call", f, arg } end } } }
return M
end

View File

@ -0,0 +1,96 @@
-------------------------------------------------------------------------------
-- Copyright (c) 2006-2013 Fabien Fleutot and others.
--
-- All rights reserved.
--
-- This program and the accompanying materials are made available
-- under the terms of the Eclipse Public License v1.0 which
-- accompanies this distribution, and is available at
-- http://www.eclipse.org/legal/epl-v10.html
--
-- This program and the accompanying materials are also made available
-- under the terms of the MIT public license which accompanies this
-- distribution, and is available at http://www.lua.org/license.html
--
-- Contributors:
-- Fabien Fleutot - API and implementation
--
-------------------------------------------------------------------------------
--------------------------------------------------------------------------------
--
-- Non-Lua syntax extensions
--
--------------------------------------------------------------------------------
local gg = require 'metalua.grammar.generator'
-- Non-Lua syntax extensions installed on parser `M`: algebraic
-- datatypes (`Tag{...}), anonymous |x| lambdas, Haskell-style
-- backquoted infix calls, and C-style op-assignments (+=, -=, *=, /=).
-- Fixes applied in review:
--   * removed the unused local helper `rhs` inside op_assign (the
--     right-hand sides are built inline);
--   * corrected the spelling of the "asymmetric" error message.
return function(M)
local _M = gg.future(M)
---------------------------------------------------------------------------
-- Algebraic Datatypes
----------------------------------------------------------------------------
local function adt (lx)
local node = _M.id (lx)
local tagval = node[1]
-- tagkey = `Pair{ `String "key", `String{ -{tagval} } }
local tagkey = { tag="Pair", {tag="String", "tag"}, {tag="String", tagval} }
if lx:peek().tag == "String" or lx:peek().tag == "Number" then
-- TODO support boolean literals
return { tag="Table", tagkey, lx:next() }
elseif lx:is_keyword (lx:peek(), "{") then
local x = M.table.table (lx)
table.insert (x, 1, tagkey)
return x
else return { tag="Table", tagkey } end
end
M.adt = gg.sequence{ "`", adt, builder = unpack }
M.expr.primary :add(M.adt)
----------------------------------------------------------------------------
-- Anonymous lambda
----------------------------------------------------------------------------
M.lambda_expr = gg.sequence{
"|", _M.func_params_content, "|", _M.expr,
builder = function (x)
local li = x[2].lineinfo
return { tag="Function", x[1],
{ {tag="Return", x[2], lineinfo=li }, lineinfo=li } }
end }
M.expr.primary :add (M.lambda_expr)
--------------------------------------------------------------------------------
-- Allows to write "a `f` b" instead of "f(a, b)". Taken from Haskell.
--------------------------------------------------------------------------------
function M.expr_in_backquotes (lx) return M.expr(lx, 35) end -- 35=limited precedence
M.expr.infix :add{ name = "infix function",
"`", _M.expr_in_backquotes, "`", prec = 35, assoc="left",
builder = function(a, op, b) return {tag="Call", op[1], a, b} end }
--------------------------------------------------------------------------------
-- C-style op+assignments
-- TODO: no protection against side-effects in LHS vars.
--------------------------------------------------------------------------------
-- Register keyword `kw` (e.g. "+=") as an assignment that rewrites
-- "a1, a2 op= b1, b2" into "a1, a2 = a1 op b1, a2 op b2".
local function op_assign(kw, op)
local function f(a,b)
if #a ~= #b then gg.parse_error "asymmetric operator+assignment" end
local right = { }
local r = { tag="Set", a, right }
for i=1, #a do right[i] = { tag="Op", op, a[i], b[i] } end
return r
end
M.lexer :add (kw)
M.assignments[kw] = f
end
local ops = { add='+='; sub='-='; mul='*='; div='/=' }
for ast_op_name, keyword in pairs(ops) do op_assign(keyword, ast_op_name) end
return M
end

View File

@ -0,0 +1,44 @@
--------------------------------------------------------------------------------
-- Copyright (c) 2006-2014 Fabien Fleutot and others.
--
-- All rights reserved.
--
-- This program and the accompanying materials are made available
-- under the terms of the Eclipse Public License v1.0 which
-- accompanies this distribution, and is available at
-- http://www.eclipse.org/legal/epl-v10.html
--
-- This program and the accompanying materials are also made available
-- under the terms of the MIT public license which accompanies this
-- distribution, and is available at http://www.lua.org/license.html
--
-- Contributors:
-- Fabien Fleutot - API and implementation
--
--------------------------------------------------------------------------------
----------------------------------------------------------------------
-- Generate a new lua-specific lexer, derived from the generic lexer.
----------------------------------------------------------------------
local generic_lexer = require 'metalua.grammar.lexer'
-- Build and return a fresh Lua lexer: a clone of the generic lexer,
-- taught every Lua keyword and multi-character symbol, including the
-- Lua 5.2/5.3 additions and the Metalua-specific "+{" / "-{" tokens.
return function()
    local lx = generic_lexer.lexer :clone()
    local tokens = {
        "and", "break", "do", "else", "elseif",
        "end", "false", "for", "function",
        "goto", -- Lua5.2
        "if",
        "in", "local", "nil", "not", "or", "repeat",
        "return", "then", "true", "until", "while",
        "...", "..", "==", ">=", "<=", "~=",
        "<<", ">>", "//", -- Lua5.3
        "::", -- Lua5.2
        "+{", "-{" } -- Metalua
    for _, token in ipairs(tokens) do
        lx :add (token)
    end
    return lx
end

View File

@ -0,0 +1,138 @@
-------------------------------------------------------------------------------
-- Copyright (c) 2006-2014 Fabien Fleutot and others.
--
-- All rights reserved.
--
-- This program and the accompanying materials are made available
-- under the terms of the Eclipse Public License v1.0 which
-- accompanies this distribution, and is available at
-- http://www.eclipse.org/legal/epl-v10.html
--
-- This program and the accompanying materials are also made available
-- under the terms of the MIT public license which accompanies this
-- distribution, and is available at http://www.lua.org/license.html
--
-- Contributors:
-- Fabien Fleutot - API and implementation
--
-------------------------------------------------------------------------------
-- Compile-time metaprogramming features: splicing ASTs generated during compilation,
-- AST quasi-quoting helpers.
local gg = require 'metalua.grammar.generator'
-- Compile-time metaprogramming: installs on parser `M` the "-{ ... }"
-- splice parser (run code at compile time, insert the resulting AST)
-- and the "+{ ... }" quote parser (turn parsed code into an AST that
-- builds that AST).
return function(M)
local _M = gg.future(M)
M.meta={ }
local _MM = gg.future(M.meta)
--------------------------------------------------------------------------------
-- External splicing: compile an AST into a chunk, load and evaluate
-- that chunk, and replace the chunk by its result (which must also be
-- an AST).
--------------------------------------------------------------------------------
-- TODO: that's not part of the parser
function M.meta.eval (ast)
-- TODO: should there be one mlc per splice, or per parser instance?
local mlc = require 'metalua.compiler'.new()
local f = mlc :ast_to_function (ast, '=splice')
local result=f(M) -- splices act on the current parser
return result
end
----------------------------------------------------------------------------
-- Going from an AST to an AST representing that AST
-- the only hash-part key being lifted is `"tag"`.
-- Doesn't lift subtrees protected inside a `Splice{ ... }.
-- e.g. change `Foo{ 123 } into
-- `Table{ `Pair{ `String "tag", `String "foo" }, `Number 123 }
----------------------------------------------------------------------------
local function lift (t)
--print("QUOTING:", table.tostring(t, 60,'nohash'))
-- Dispatch on the Lua type of the value being lifted.
local cases = { }
function cases.table (t)
local mt = { tag = "Table" }
--table.insert (mt, { tag = "Pair", quote "quote", { tag = "True" } })
if t.tag == "Splice" then
-- Protected subtree: return its single child as-is, unlifted.
assert (#t==1, "Invalid splice")
local sp = t[1]
return sp
elseif t.tag then
table.insert (mt, { tag="Pair", lift "tag", lift(t.tag) })
end
for _, v in ipairs (t) do
table.insert (mt, lift(v))
end
return mt
end
function cases.number (t) return { tag = "Number", t, quote = true } end
function cases.string (t) return { tag = "String", t, quote = true } end
function cases.boolean (t) return { tag = t and "True" or "False", t, quote = true } end
local f = cases [type(t)]
if f then return f(t) else error ("Cannot quote an AST containing "..tostring(t)) end
end
M.meta.lift = lift
--------------------------------------------------------------------------------
-- when this variable is false, code inside [-{...}] is compiled and
-- evaluated immediately. When it's true (supposedly when we're
-- parsing data inside a quasiquote), [-{foo}] is replaced by
-- [`Splice{foo}], which will be unpacked by [quote()].
--------------------------------------------------------------------------------
local in_a_quote = false
--------------------------------------------------------------------------------
-- Parse the inside of a "-{ ... }"
--------------------------------------------------------------------------------
function M.meta.splice_content (lx)
-- An optional "name:" prefix selects which sub-parser handles the
-- splice body; defaults to "expr".
local parser_name = "expr"
if lx:is_keyword (lx:peek(2), ":") then
local a = lx:next()
lx:next() -- skip ":"
assert (a.tag=="Id", "Invalid splice parser name")
parser_name = a[1]
end
-- TODO FIXME running a new parser with the old lexer?!
local parser = require 'metalua.compiler.parser'.new()
local ast = parser [parser_name](lx)
if in_a_quote then -- only prevent quotation in this subtree
--printf("SPLICE_IN_QUOTE:\n%s", _G.table.tostring(ast, "nohash", 60))
return { tag="Splice", ast }
else -- convert in a block, eval, replace with result
if parser_name == "expr" then ast = { { tag="Return", ast } }
elseif parser_name == "stat" then ast = { ast }
elseif parser_name ~= "block" then
error ("splice content must be an expr, stat or block") end
--printf("EXEC THIS SPLICE:\n%s", _G.table.tostring(ast, "nohash", 60))
return M.meta.eval (ast)
end
end
M.meta.splice = gg.sequence{ "-{", _MM.splice_content, "}", builder=unpack }
--------------------------------------------------------------------------------
-- Parse the inside of a "+{ ... }"
--------------------------------------------------------------------------------
function M.meta.quote_content (lx)
local parser
if lx:is_keyword (lx:peek(2), ":") then -- +{parser: content }
local parser_name = M.id(lx)[1]
parser = M[parser_name]
lx:next() -- skip ":"
else -- +{ content }
parser = M.expr
end
-- Save/restore the quoting flag so nested quotes compose correctly.
local prev_iq = in_a_quote
in_a_quote = true
--print("IN_A_QUOTE")
local content = parser (lx)
local q_content = M.meta.lift (content)
in_a_quote = prev_iq
return q_content
end
return M
end

View File

@ -0,0 +1,147 @@
-------------------------------------------------------------------------------
-- Copyright (c) 2006-2013 Fabien Fleutot and others.
--
-- All rights reserved.
--
-- This program and the accompanying materials are made available
-- under the terms of the Eclipse Public License v1.0 which
-- accompanies this distribution, and is available at
-- http://www.eclipse.org/legal/epl-v10.html
--
-- This program and the accompanying materials are also made available
-- under the terms of the MIT public license which accompanies this
-- distribution, and is available at http://www.lua.org/license.html
--
-- Contributors:
-- Fabien Fleutot - API and implementation
--
-------------------------------------------------------------------------------
-------------------------------------------------------------------------------
--
-- Summary: metalua parser, miscellaneous utility functions.
--
-------------------------------------------------------------------------------
--------------------------------------------------------------------------------
--
-- Exported API:
-- * [mlp.fget()]
-- * [mlp.id()]
-- * [mlp.opt_id()]
-- * [mlp.id_list()]
-- * [mlp.string()]
-- * [mlp.opt_string()]
-- * [mlp.id2string()]
--
--------------------------------------------------------------------------------
local gg = require 'metalua.grammar.generator'
-- TODO: replace splice-aware versions with naive ones, move extensions in ./meta
return function(M)
local _M = gg.future(M)
--------------------------------------------------------------------------------
-- Try to read an identifier (possibly as a splice), or return [false] if no
-- id is found.
--------------------------------------------------------------------------------
function M.opt_id (lx)
local a = lx:peek();
if lx:is_keyword (a, "-{") then
local v = M.meta.splice(lx)
if v.tag ~= "Id" and v.tag ~= "Splice" then
gg.parse_error(lx, "Bad id splice")
end
return v
elseif a.tag == "Id" then return lx:next()
else return false end
end
--------------------------------------------------------------------------------
-- Mandatory reading of an id: causes an error if it can't read one.
--------------------------------------------------------------------------------
function M.id (lx)
return M.opt_id (lx) or gg.parse_error(lx,"Identifier expected")
end
--------------------------------------------------------------------------------
-- Common helper function
--------------------------------------------------------------------------------
M.id_list = gg.list { primary = _M.id, separators = "," }
--------------------------------------------------------------------------------
-- Converts an identifier into a string. Hopefully one day it'll handle
-- splices gracefully, but that proves quite tricky.
--------------------------------------------------------------------------------
function M.id2string (id)
--print("id2string:", disp.ast(id))
if id.tag == "Id" then id.tag = "String"; return id
elseif id.tag == "Splice" then
error ("id2string on splice not implemented")
-- Evaluating id[1] will produce `Id{ xxx },
-- and we want it to produce `String{ xxx }.
-- The following is the plain notation of:
-- +{ `String{ `Index{ `Splice{ -{id[1]} }, `Number 1 } } }
return { tag="String", { tag="Index", { tag="Splice", id[1] },
{ tag="Number", 1 } } }
else error ("Identifier expected: "..table.tostring(id, 'nohash')) end
end
--------------------------------------------------------------------------------
-- Read a string, possibly spliced, or return an error if it can't
--------------------------------------------------------------------------------
function M.string (lx)
local a = lx:peek()
if lx:is_keyword (a, "-{") then
local v = M.meta.splice(lx)
if v.tag ~= "String" and v.tag ~= "Splice" then
gg.parse_error(lx,"Bad string splice")
end
return v
elseif a.tag == "String" then return lx:next()
else error "String expected" end
end
--------------------------------------------------------------------------------
-- Try to read a string, or return false if it can't. No splice allowed.
--------------------------------------------------------------------------------
function M.opt_string (lx)
return lx:peek().tag == "String" and lx:next()
end
--------------------------------------------------------------------------------
-- Chunk reader: block + Eof
--------------------------------------------------------------------------------
-- Skip a leading "#..." line (Unix shebang) if one starts the source, so
-- executable scripts can be parsed. Pokes directly at lexer internals.
function M.skip_initial_sharp_comment (lx)
-- Dirty hack: I'm happily fondling lexer's private parts
-- FIXME: redundant with lexer:newstream()
lx :sync()
-- The () capture yields the offset just past the newline, or nil if
-- the source does not start with '#'.
local i = lx.src:match ("^#.-\n()", lx.i)
if i then
lx.i = i
lx.column_offset = i
-- account for the one physical line just skipped
lx.line = lx.line and lx.line + 1 or 1
end
end
-- Parse a whole compilation unit: optional shebang line, then a
-- statement block, then end-of-file. Empty files yield an empty block.
local function chunk (lx)
if lx:peek().tag == 'Eof' then return { } end -- handle empty files
M.skip_initial_sharp_comment (lx)
local body = M.block (lx)
if lx:peek().tag ~= "Eof" then
gg.parse_error(lx, "End-of-file expected")
end
return body
end
-- chunk is wrapped in a sequence so that it has a "transformer" field.
M.chunk = gg.sequence { chunk, builder = unpack }
return M
end

View File

@ -0,0 +1,283 @@
------------------------------------------------------------------------------
-- Copyright (c) 2006-2013 Fabien Fleutot and others.
--
-- All rights reserved.
--
-- This program and the accompanying materials are made available
-- under the terms of the Eclipse Public License v1.0 which
-- accompanies this distribution, and is available at
-- http://www.eclipse.org/legal/epl-v10.html
--
-- This program and the accompanying materials are also made available
-- under the terms of the MIT public license which accompanies this
-- distribution, and is available at http://www.lua.org/license.html
--
-- Contributors:
-- Fabien Fleutot - API and implementation
--
-------------------------------------------------------------------------------
-------------------------------------------------------------------------------
--
-- Summary: metalua parser, statement/block parser. This is part of the
-- definition of module [mlp].
--
-------------------------------------------------------------------------------
-------------------------------------------------------------------------------
--
-- Exports API:
-- * [mlp.stat()]
-- * [mlp.block()]
-- * [mlp.for_header()]
--
-------------------------------------------------------------------------------
local lexer = require 'metalua.grammar.lexer'
local gg = require 'metalua.grammar.generator'
local annot = require 'metalua.compiler.parser.annot.generator'
--------------------------------------------------------------------------------
-- List of all keywords that indicate the end of a statement block. Users are
-- likely to extend this list when designing extensions.
--------------------------------------------------------------------------------
return function(M)
local _M = gg.future(M)
-- Keywords that may close an open statement block; extensions append
-- to this list via the :add method below.
M.block_terminators = { "else", "elseif", "end", "until", ")", "}", "]" }
-- FIXME: this must be handled from within GG!!!
-- FIXME: there's no :add method in the list anyway. Added by gg.list?!
-- Register one extra terminator keyword, or a list of them.
function M.block_terminators :add(x)
if type (x) == "table" then for _, y in ipairs(x) do self :add (y) end
else table.insert (self, x) end
end
----------------------------------------------------------------------------
-- list of statements, possibly followed by semicolons
----------------------------------------------------------------------------
M.block = gg.list {
name = "statements block",
terminators = M.block_terminators,
primary = function (lx)
-- FIXME use gg.optkeyword()
local x = M.stat (lx)
if lx:is_keyword (lx:peek(), ";") then lx:next() end
return x
end }
----------------------------------------------------------------------------
-- Helper function for "return <expr_list>" parsing.
-- Called when parsing return statements.
-- The specific test for initial ";" is because it's not a block terminator,
-- so without it gg.list would choke on "return ;" statements.
-- We don't make a modified copy of block_terminators because this list
-- is sometimes modified at runtime, and the return parser would get out of
-- sync if it was relying on a copy.
----------------------------------------------------------------------------
local return_expr_list_parser = gg.multisequence{
{ ";" , builder = function() return { } end },
default = gg.list {
_M.expr, separators = ",", terminators = M.block_terminators } }
-- Variable list of a generic "for ... in" loop, terminated by "in".
local for_vars_list = gg.list{
name = "for variables list",
primary = _M.id,
separators = ",",
terminators = "in" }
----------------------------------------------------------------------------
-- for header, between [for] and [do] (exclusive).
-- Return the `Forxxx{...} AST, without the body element (the last one).
----------------------------------------------------------------------------
-- The keyword after the name list decides between the numeric form
-- `for i = a, b[, c]` and the generic form `for v1, ... in exprs`.
function M.for_header (lx)
local names = M.id_list(lx)
if not lx :is_keyword (lx:peek(), "=") then
-- Generic "for ... in ..." loop.
if not lx :is_keyword (lx :next(), "in") then
gg.parse_error (lx, '"=" or "in" expected in for loop')
end
local generators = M.expr_list (lx)
return { tag="Forin", names, generators }
end
-- Numeric loop: exactly one variable, 2 or 3 bound expressions.
if #names ~= 1 then
gg.parse_error (lx, "numeric for only accepts one variable")
end
lx:next() -- skip "="
local bounds = M.expr_list (lx)
if #bounds < 2 or #bounds > 3 then
gg.parse_error (lx, "numeric for requires 2 or 3 boundaries")
end
return { tag="Fornum", names[1], unpack (bounds) }
end
----------------------------------------------------------------------------
-- Function def parser helper: id ( . id ) *
----------------------------------------------------------------------------
-- Fold a dotted name list "a.b.c" into nested `Index nodes:
-- `Index{ `Index{ a, "b" }, "c" }, with line info spanning from the
-- first id to the current component.
local function fn_builder (ids)
local result = ids[1]
local start = result.lineinfo.first
for k = 2, #ids do
local field = M.id2string(ids[k])
result = { tag="Index", result, field,
lineinfo = lexer.new_lineinfo(start, field.lineinfo.last) }
end
return result
end
local func_name = gg.list{ _M.id, separators = ".", builder = fn_builder }
----------------------------------------------------------------------------
-- Function def parser helper: ( : id )?
----------------------------------------------------------------------------
-- Optional ":methodname" suffix; the transformer converts the parsed
-- `Id into a `String, since it will be used as an index key.
local method_name = gg.onkeyword{ name = "method invocation", ":", _M.id,
transformers = { function(x) return x and x.tag=='Id' and M.id2string(x) end } }
----------------------------------------------------------------------------
-- Function def builder
----------------------------------------------------------------------------
-- Turn "function <name>[:<method>] <body>" into the equivalent
-- assignment `Set{ {name}, {function} }. For a method definition the
-- name becomes an `Index node and an implicit "self" parameter is
-- prepended to the function's parameter list.
local function funcdef_builder(x)
local target, method, fn = unpack(x)
if method then
target = { tag="Index", target, method,
lineinfo = {
first = target.lineinfo.first,
last = method.lineinfo.last } }
-- methods receive the receiver as their first parameter
table.insert (fn[1], 1, {tag="Id", "self"})
end
local set = { tag="Set", {target}, {fn} }
set[1].lineinfo = target.lineinfo
set[2].lineinfo = fn.lineinfo
return set
end
----------------------------------------------------------------------------
-- if statement builder
----------------------------------------------------------------------------
-- Flatten the parsed (condition, block) pairs plus an optional "else"
-- block into `If{ cond1, block1, cond2, block2, ..., else_block? }.
local function if_builder (x)
local cond_blocks, else_block = x[1], x[2]
local node = {tag="If"}
for idx = 1, #cond_blocks do
local cond, body = unpack(cond_blocks[idx])
node[2*idx-1], node[2*idx] = cond, body
end
if else_block then table.insert(node, #node+1, else_block) end
return node
end
--------------------------------------------------------------------------------
-- produce a list of (expr,block) pairs
--------------------------------------------------------------------------------
local elseifs_parser = gg.list {
gg.sequence { _M.expr, "then", _M.block , name='elseif parser' },
separators = "elseif",
terminators = { "else", "end" }
}
-- Expression optionally followed by a "#"-introduced type annotation;
-- annotated expressions are wrapped in an `Annot node.
local annot_expr = gg.sequence {
_M.expr,
gg.onkeyword{ "#", gg.future(M, 'annot').tf },
builder = function(x)
local e, a = unpack(x)
if a then return { tag='Annot', e, a }
else return e end
end }
-- Comma-separated list of (possibly annotated) expressions.
local annot_expr_list = gg.list {
primary = annot.opt(M, _M.expr, 'tf'), separators = ',' }
------------------------------------------------------------------------
-- assignments and calls: statements that don't start with a keyword
------------------------------------------------------------------------
-- Parse a statement beginning with an expression: either an assignment
-- (next keyword is registered in M.assignments) or a bare function /
-- method call. Anything else is reported as a parse error.
local function assign_or_call_stat_parser (lx)
local lhs = annot_expr_list (lx)
local kw = lx:is_keyword(lx:peek())
local op = kw and M.assignments[kw]
-- TODO: refactor annotations
if op then
--FIXME: check that [lhs] is a LHS
lx :next()
local annots
lhs, annots = annot.split(lhs)
local rhs = M.expr_list (lx)
if type(op)=="string" then return { tag=op, lhs, rhs, annots } end
return op (lhs, rhs)
end
-- No assignment operator: must be a single call/invoke statement.
assert (#lhs > 0)
if #lhs > 1 then
gg.parse_error (lx,
"comma is not a valid statement separator; statement can be "..
"separated by semicolons, or not separated at all")
elseif lhs[1].tag ~= "Call" and lhs[1].tag ~= "Invoke" then
-- Build a human-readable description of what was found instead.
local typename
if lhs[1].tag == 'Id' then
typename = '("'..lhs[1][1]..'") is an identifier'
elseif lhs[1].tag == 'Op' then
typename = "is an arithmetic operation"
else typename = "is of type '"..(lhs[1].tag or "<list>").."'" end
gg.parse_error (lx,
"This expression %s; "..
"a statement was expected, and only function and method call "..
"expressions can be used as statements", typename);
end
return lhs[1]
end
-- Parser for what follows the "local" keyword: either a recursive
-- local function definition (`Localrec) or a plain local declaration
-- with optional initializers (`Local).
M.local_stat_parser = gg.multisequence{
-- local function <name> <func_val>
{ "function", _M.id, _M.func_val, builder =
function(x)
local vars = { x[1], lineinfo = x[1].lineinfo }
local vals = { x[2], lineinfo = x[2].lineinfo }
return { tag="Localrec", vars, vals }
end },
-- local <id_list> ( = <expr_list> )?
default = gg.sequence{
gg.list{
primary = annot.opt(M, _M.id, 'tf'),
separators = ',' },
gg.onkeyword{ "=", _M.expr_list },
builder = function(x)
local annotated_left, right = unpack(x)
local left, annotations = annot.split(annotated_left)
return {tag="Local", left, right or { }, annotations }
end } }
------------------------------------------------------------------------
-- statement
------------------------------------------------------------------------
-- Top-level statement parser: dispatches on the leading keyword, and
-- falls back on assignment/call parsing for keyword-less statements.
M.stat = gg.multisequence {
name = "statement",
{ "do", _M.block, "end", builder =
function (x) return { tag="Do", unpack (x[1]) } end },
{ "for", _M.for_header, "do", _M.block, "end", builder =
function (x) x[1][#x[1]+1] = x[2]; return x[1] end },
{ "function", func_name, method_name, _M.func_val, builder=funcdef_builder },
{ "while", _M.expr, "do", _M.block, "end", builder = "While" },
{ "repeat", _M.block, "until", _M.expr, builder = "Repeat" },
{ "local", _M.local_stat_parser, builder = unpack },
{ "return", return_expr_list_parser, builder =
function(x) x[1].tag='Return'; return x[1] end },
{ "goto", _M.id, builder =
function(x) x[1].tag='Goto'; return x[1] end },
{ "::", _M.id, "::", builder =
function(x) x[1].tag='Label'; return x[1] end },
{ "break", builder = function() return { tag="Break" } end },
{ "-{", gg.future(M, 'meta').splice_content, "}", builder = unpack },
{ "if", gg.nonempty(elseifs_parser), gg.onkeyword{ "else", M.block }, "end",
builder = if_builder },
default = assign_or_call_stat_parser }
-- Assignment operators, keyword -> AST tag (or builder function).
-- Extensions register compound assignments through :add.
M.assignments = {
["="] = "Set"
}
function M.assignments:add(k, v) self[k] = v end
return M
end

View File

@ -0,0 +1,77 @@
--------------------------------------------------------------------------------
-- Copyright (c) 2006-2013 Fabien Fleutot and others.
--
-- All rights reserved.
--
-- This program and the accompanying materials are made available
-- under the terms of the Eclipse Public License v1.0 which
-- accompanies this distribution, and is available at
-- http://www.eclipse.org/legal/epl-v10.html
--
-- This program and the accompanying materials are also made available
-- under the terms of the MIT public license which accompanies this
-- distribution, and is available at http://www.lua.org/license.html
--
-- Contributors:
-- Fabien Fleutot - API and implementation
--
--------------------------------------------------------------------------------
--------------------------------------------------------------------------------
--
-- Exported API:
-- * [M.table_bracket_field()]
-- * [M.table_field()]
-- * [M.table_content()]
-- * [M.table()]
--
-- KNOWN BUG: doesn't handle final ";" or "," before final "}"
--
--------------------------------------------------------------------------------
local gg = require 'metalua.grammar.generator'
return function(M)
M.table = { }
local future_table = gg.future(M.table)
local future_expr = gg.future(M).expr
--------------------------------------------------------------------------------
-- `[key] = value` table field definition
--------------------------------------------------------------------------------
M.table.bracket_pair = gg.sequence{ "[", future_expr, "]", "=", future_expr, builder = "Pair" }
--------------------------------------------------------------------------------
-- table element parser: list value, `id = value` pair or `[value] = value` pair.
--------------------------------------------------------------------------------
function M.table.element (lx)
-- "[expr] = expr" form?
if lx :is_keyword (lx :peek(), "[") then return M.table.bracket_pair(lx) end
local first = M.expr (lx)
-- plain list-style value unless an "=" follows
if not lx :is_keyword (lx :peek(), "=") then return first end
lx :next(); -- skip the "="
local key = M.id2string(first) -- will fail on non-identifiers
local value = M.expr(lx)
local pair = { tag="Pair", key, value }
pair.lineinfo = { first = key.lineinfo.first, last = value.lineinfo.last }
return pair
end
-----------------------------------------------------------------------------
-- table constructor, without enclosing braces; returns a full table object
-----------------------------------------------------------------------------
M.table.content = gg.list {
-- eta expansion to allow patching the element definition
primary = future_table.element,
separators = { ",", ";" },
terminators = "}",
builder = "Table" }
--------------------------------------------------------------------------------
-- complete table constructor including [{...}]
--------------------------------------------------------------------------------
-- TODO beware, stat and expr use only table.content, this can't be patched.
M.table.table = gg.sequence{ "{", future_table.content, "}", builder = unpack }
return M
end

View File

@ -0,0 +1,832 @@
--------------------------------------------------------------------------------
-- Copyright (c) 2006-2013 Fabien Fleutot and others.
--
-- All rights reserved.
--
-- This program and the accompanying materials are made available
-- under the terms of the Eclipse Public License v1.0 which
-- accompanies this distribution, and is available at
-- http://www.eclipse.org/legal/epl-v10.html
--
-- This program and the accompanying materials are also made available
-- under the terms of the MIT public license which accompanies this
-- distribution, and is available at http://www.lua.org/license.html
--
-- Contributors:
-- Fabien Fleutot - API and implementation
--
--------------------------------------------------------------------------------
--------------------------------------------------------------------------------
--
-- Summary: parser generator. Collection of higher order functors,
-- which allow to build and combine parsers. Relies on a lexer
-- that supports the same API as the one exposed in mll.lua.
--
--------------------------------------------------------------------------------
--------------------------------------------------------------------------------
--
-- Exported API:
--
-- Parser generators:
-- * [gg.sequence()]
-- * [gg.multisequence()]
-- * [gg.expr()]
-- * [gg.list()]
-- * [gg.onkeyword()]
-- * [gg.optkeyword()]
--
-- Other functions:
-- * [gg.parse_error()]
-- * [gg.make_parser()]
-- * [gg.is_parser()]
--
--------------------------------------------------------------------------------
local M = { }
local lexer = require 'metalua.grammar.lexer'
--------------------------------------------------------------------------------
-- Symbol generator: [gensym()] returns a guaranteed-to-be-unique identifier.
-- The main purpose is to avoid variable capture in macros.
--
-- If a string is passed as an argument, this string will be part of the
-- id name (helpful for macro debugging)
--------------------------------------------------------------------------------
-- Monotonic counter shared by all generated symbols.
local gensymidx = 0
function M.gensym (arg)
gensymidx = gensymidx + 1
-- Generated names contain dots, which cannot appear in source-level
-- identifiers -- presumably this is what guarantees uniqueness vs.
-- user variables (TODO confirm).
return { tag="Id", string.format(".%i.%s", gensymidx, arg or "")}
end
-------------------------------------------------------------------------------
-- parser metatable, which maps __call to method parse, and adds some
-- error tracing boilerplate.
-------------------------------------------------------------------------------
local parser_metatable = { }
-- Calling a parser object p(lx, ...) forwards to p:parse(lx, ...).
function parser_metatable :__call (lx, ...)
return self :parse (lx, ...)
end
-------------------------------------------------------------------------------
-- Turn a table into a parser, mainly by setting the metatable.
-------------------------------------------------------------------------------
-- Tags [p] with its [kind] ("sequence", "list", ...), guarantees a
-- [transformers] list with an :add method, and installs the metatable
-- that makes the table callable. Returns [p] itself.
function M.make_parser(kind, p)
p.kind = kind
if not p.transformers then p.transformers = { } end
function p.transformers:add (x)
table.insert (self, x)
end
setmetatable (p, parser_metatable)
return p
end
-------------------------------------------------------------------------------
-- Return true iff [x] is a parser.
-- If it's a gg-generated parser, return the name of its kind.
-------------------------------------------------------------------------------
function M.is_parser (x)
if type(x)=="function" then return true end
return getmetatable(x)==parser_metatable and x.kind
end
-------------------------------------------------------------------------------
-- Parse a sequence, without applying builder nor transformers.
-------------------------------------------------------------------------------
-- Walk sequence definition [p]: string items are mandatory keywords
-- (consumed and checked, not collected), parser items are run and their
-- results accumulated. Returns the list of sub-parser results.
local function raw_parse_sequence (lx, p)
local results = { }
for idx = 1, #p do
local item = p[idx]
if type(item) == "string" then
-- literal keyword: consume the next token and check it matches
local kw = lx :next()
if not lx :is_keyword (kw, item) then
M.parse_error(
lx, "A keyword was expected, probably `%s'.", item)
end
elseif M.is_parser (item) then
results[#results+1] = item(lx)
else -- Invalid parser definition, this is *not* a parsing error
error(string.format(
"Sequence `%s': element #%i is neither a string nor a parser: %s",
p.name, idx, table.tostring(item)))
end
end
return results
end
-------------------------------------------------------------------------------
-- Parse a multisequence, without applying multisequence transformers.
-- The sequences are completely parsed.
-------------------------------------------------------------------------------
-- Dispatch on the upcoming keyword: run the matching registered
-- sequence if any, fall back on [default], or return false when
-- neither applies.
local function raw_parse_multisequence (lx, sequence_table, default)
local handler = sequence_table[lx:is_keyword(lx:peek())] or default
if handler then return handler (lx) end
return false
end
-------------------------------------------------------------------------------
-- Applies all transformers listed in parser on ast.
-------------------------------------------------------------------------------
-- Run every transformer registered on [parser] over [ast]; a
-- transformer returning nil/false leaves the previous ast in place.
-- Table results then get their lineinfo refreshed to span [fli]..[lli]
-- when it is missing or stale.
local function transform (ast, parser, fli, lli)
local ts = parser.transformers
if ts then
for _, fn in ipairs (ts) do ast = fn(ast) or ast end
end
if type(ast) == 'table' then
local info = ast.lineinfo
if not info or info.first~=fli or info.last~=lli then
ast.lineinfo = lexer.new_lineinfo(fli, lli)
end
end
return ast
end
-------------------------------------------------------------------------------
-- Generate a tracable parsing error (not implemented yet)
-------------------------------------------------------------------------------
-- Raise a parse error located at the lexer's current position. [fmt]
-- and the extra args are string.format'ed; when the raw source is
-- available, the offending line is echoed with a "^" column marker.
function M.parse_error(lx, fmt, ...)
local li = lx:lineinfo_left()
local file, line, column, offset, positions
if li then
file, line, column, offset = li.source, li.line, li.column, li.offset
positions = { first = li, last = li }
else
line, column, offset = -1, -1, -1
end
-- NOTE(review): [positions] is computed but never used below.
local msg = string.format("line %i, char %i: "..fmt, line, column, ...)
if file and file~='?' then msg = "file "..file..", "..msg end
local src = lx.src
if offset>0 and src then
-- widen [i..j] to the enclosing physical line of the source
local i, j = offset, offset
while src:sub(i,i) ~= '\n' and i>=0 do i=i-1 end
while src:sub(j,j) ~= '\n' and j<=#src do j=j+1 end
local srcline = src:sub (i+1, j-1)
local idx = string.rep (" ", column).."^"
msg = string.format("%s\n>>> %s\n>>> %s", msg, srcline, idx)
end
--lx :kill()
error(msg)
end
-------------------------------------------------------------------------------
--
-- Sequence parser generator
--
-------------------------------------------------------------------------------
-- Input fields:
--
-- * [builder]: how to build an AST out of sequence parts. let [x] be the list
-- of subparser results (keywords are simply omitted). [builder] can be:
-- - [nil], in which case the result of parsing is simply [x]
-- - a string, which is then put as a tag on [x]
-- - a function, which takes [x] as a parameter and returns an AST.
--
-- * [name]: the name of the parser. Used for debug messages
--
-- * [transformers]: a list of AST->AST functions, applied in order on ASTs
-- returned by the parser.
--
-- * Table-part entries corresponds to keywords (strings) and subparsers
-- (function and callable objects).
--
-- After creation, the following fields are added:
-- * [parse] the parsing function lexer->AST
-- * [kind] == "sequence"
-- * [name] is set, if it wasn't in the input.
--
-------------------------------------------------------------------------------
-- Build a sequence parser from config table [p]; see the comment block
-- above for the accepted fields ([builder], [name], [transformers] and
-- the array part of keywords/sub-parsers).
function M.sequence (p)
M.make_parser ("sequence", p)
-------------------------------------------------------------------
-- Parsing method
-------------------------------------------------------------------
function p:parse (lx)
-- Raw parsing:
local fli = lx:lineinfo_right()
local seq = raw_parse_sequence (lx, self)
local lli = lx:lineinfo_left()
-- Builder application:
-- string -> tag, callable -> rebuild, nil -> keep raw list.
local builder, tb = self.builder, type (self.builder)
if tb == "string" then seq.tag = builder
elseif tb == "function" or builder and builder.__call then seq = builder(seq)
elseif builder == nil then -- nothing
else error ("Invalid builder of type "..tb.." in sequence") end
seq = transform (seq, self, fli, lli)
assert (not seq or seq.lineinfo)
return seq
end
-------------------------------------------------------------------
-- Construction
-------------------------------------------------------------------
-- Try to build a proper name
if p.name then
-- don't touch existing name
elseif type(p[1])=="string" then -- find name based on 1st keyword
if #p==1 then p.name=p[1]
elseif type(p[#p])=="string" then
p.name = p[1] .. " ... " .. p[#p]
else p.name = p[1] .. " ..." end
else -- can't find a decent name
p.name = "unnamed_sequence"
end
return p
end --</sequence>
-------------------------------------------------------------------------------
--
-- Multiple, keyword-driven, sequence parser generator
--
-------------------------------------------------------------------------------
-- in [p], useful fields are:
--
-- * [transformers]: as usual
--
-- * [name]: as usual
--
-- * Table-part entries must be sequence parsers, or tables which can
-- be turned into a sequence parser by [gg.sequence]. These
-- sequences must start with a keyword, and this initial keyword
-- must be different for each sequence. The table-part entries will
-- be removed after [gg.multisequence] returns.
--
-- * [default]: the parser to run if the next keyword in the lexer is
-- none of the registered initial keywords. If there's no default
-- parser and no suitable initial keyword, the multisequence parser
-- simply returns [false].
--
-- After creation, the following fields are added:
--
-- * [parse] the parsing function lexer->AST
--
-- * [sequences] the table of sequences, indexed by initial keywords.
--
-- * [add] method takes a sequence parser or a config table for
-- [gg.sequence], and adds/replaces the corresponding sequence
-- parser. If the keyword was already used, the former sequence is
-- removed and a warning is issued.
--
-- * [get] method returns a sequence by its initial keyword
--
-- * [kind] == "multisequence"
--
-------------------------------------------------------------------------------
-- Build a keyword-dispatched parser from config table [p]; see the
-- comment block above for the accepted fields.
function M.multisequence (p)
M.make_parser ("multisequence", p)
-------------------------------------------------------------------
-- Add a sequence (might be just a config table for [gg.sequence])
-------------------------------------------------------------------
function p :add (s)
-- compile if necessary:
local keyword = type(s)=='table' and s[1]
if type(s)=='table' and not M.is_parser(s) then M.sequence(s) end
-- sequences not introduced by a keyword become the default parser
if M.is_parser(s)~='sequence' or type(keyword)~='string' then
if self.default then -- two defaults
error ("In a multisequence parser, all but one sequences "..
"must start with a keyword")
else self.default = s end -- first default
else
if self.sequences[keyword] then -- duplicate keyword
-- TODO: warn that initial keyword `keyword` is overloaded in multiseq
end
self.sequences[keyword] = s
end
end -- </multisequence.add>
-------------------------------------------------------------------
-- Get the sequence starting with this keyword. [kw :: string]
-------------------------------------------------------------------
function p :get (kw) return self.sequences [kw] end
-------------------------------------------------------------------
-- Remove the sequence starting with keyword [kw :: string]
-------------------------------------------------------------------
function p :del (kw)
if not self.sequences[kw] then
-- TODO: warn that we try to delete a non-existent entry
end
local removed = self.sequences[kw]
self.sequences[kw] = nil
return removed
end
-------------------------------------------------------------------
-- Parsing method
-------------------------------------------------------------------
function p :parse (lx)
local fli = lx:lineinfo_right()
local x = raw_parse_multisequence (lx, self.sequences, self.default)
local lli = lx:lineinfo_left()
return transform (x, self, fli, lli)
end
-------------------------------------------------------------------
-- Construction
-------------------------------------------------------------------
-- Register the sequences passed to the constructor. They're going
-- from the array part of the parser to the hash part of field
-- [sequences]
p.sequences = { }
for i=1, #p do p :add (p[i]); p[i] = nil end
-- FIXME: why is this commented out?
--if p.default and not is_parser(p.default) then sequence(p.default) end
return p
end --</multisequence>
-------------------------------------------------------------------------------
--
-- Expression parser generator
--
-------------------------------------------------------------------------------
--
-- Expression configuration relies on three tables: [prefix], [infix]
-- and [suffix]. Moreover, the primary parser can be replaced by a
-- table: in this case the [primary] table will be passed to
-- [gg.multisequence] to create a parser.
--
-- Each of these tables is a modified multisequence parser: the
-- differences with respect to regular multisequence config tables are:
--
-- * the builder takes specific parameters:
-- - for [prefix], it takes the result of the prefix sequence parser,
-- and the prefixed expression
-- - for [infix], it takes the left-hand-side expression, the results
-- of the infix sequence parser, and the right-hand-side expression.
-- - for [suffix], it takes the suffixed expression, and the result
-- of the suffix sequence parser.
--
-- * the default field is a list, with parameters:
-- - [parser] the raw parsing function
-- - [transformers], as usual
-- - [prec], the operator's precedence
-- - [assoc] for [infix] table, the operator's associativity, which
-- can be "left", "right" or "flat" (default to left)
--
-- In [p], useful fields are:
-- * [transformers]: as usual
-- * [name]: as usual
-- * [primary]: the atomic expression parser, or a multisequence config
-- table (mandatory)
-- * [prefix]: prefix operators config table, see above.
-- * [infix]: infix operators config table, see above.
-- * [suffix]: suffix operators config table, see above.
--
-- After creation, these fields are added:
-- * [kind] == "expr"
-- * [parse] as usual
-- * each table is turned into a multisequence, and therefore has an
-- [add] method
--
-------------------------------------------------------------------------------
-- Precedence-driven expression parser built from [primary], [prefix],
-- [infix] and [suffix] operator tables; see the comment block above for
-- the accepted fields. Operator handling is order-sensitive, so the
-- code below is intentionally left byte-identical.
function M.expr (p)
M.make_parser ("expr", p)
-------------------------------------------------------------------
-- parser method.
-- In addition to the lexer, it takes an optional precedence:
-- it won't read expressions whose precedence is lower or equal
-- to [prec].
-------------------------------------------------------------------
function p :parse (lx, prec)
prec = prec or 0
------------------------------------------------------
-- Extract the right parser and the corresponding
-- options table, for (pre|in|suff)fix operators.
-- Options include prec, assoc, transformers.
------------------------------------------------------
local function get_parser_info (tab)
local p2 = tab :get (lx :is_keyword (lx :peek()))
if p2 then -- keyword-based sequence found
local function parser(lx) return raw_parse_sequence(lx, p2) end
return parser, p2
else -- Got to use the default parser
local d = tab.default
if d then return d.parse or d.parser, d
else return false, false end
end
end
------------------------------------------------------
-- Look for a prefix sequence. Multiple prefixes are
-- handled through the recursive [p.parse] call.
-- Notice the double-transform: one for the primary
-- expr, and one for the one with the prefix op.
------------------------------------------------------
local function handle_prefix ()
local fli = lx :lineinfo_right()
local p2_func, p2 = get_parser_info (self.prefix)
local op = p2_func and p2_func (lx)
if op then -- Keyword-based sequence found
local ili = lx :lineinfo_right() -- Intermediate LineInfo
local e = p2.builder (op, self :parse (lx, p2.prec))
local lli = lx :lineinfo_left()
return transform (transform (e, p2, ili, lli), self, fli, lli)
else -- No prefix found, get a primary expression
local e = self.primary(lx)
local lli = lx :lineinfo_left()
return transform (e, self, fli, lli)
end
end --</expr.parse.handle_prefix>
------------------------------------------------------
-- Look for an infix sequence+right-hand-side operand.
-- Return the whole binary expression result,
-- or false if no operator was found.
------------------------------------------------------
local function handle_infix (e)
local p2_func, p2 = get_parser_info (self.infix)
if not p2 then return false end
-----------------------------------------
-- Handle flattening operators: gather all operands
-- of the series in [list]; when a different operator
-- is found, stop, build from [list], [transform] and
-- return.
-----------------------------------------
if (not p2.prec or p2.prec>prec) and p2.assoc=="flat" then
local fli = lx:lineinfo_right()
local pflat, list = p2, { e }
repeat
local op = p2_func(lx)
if not op then break end
table.insert (list, self:parse (lx, p2.prec))
local _ -- We only care about checking that p2==pflat
_, p2 = get_parser_info (self.infix)
until p2 ~= pflat
local e2 = pflat.builder (list)
local lli = lx:lineinfo_left()
return transform (transform (e2, pflat, fli, lli), self, fli, lli)
-----------------------------------------
-- Handle regular infix operators: [e] the LHS is known,
-- just gather the operator and [e2] the RHS.
-- Result goes in [e3].
-----------------------------------------
elseif p2.prec and p2.prec>prec or
p2.prec==prec and p2.assoc=="right" then
local fli = e.lineinfo.first -- lx:lineinfo_right()
local op = p2_func(lx)
if not op then return false end
local e2 = self:parse (lx, p2.prec)
local e3 = p2.builder (e, op, e2)
local lli = lx:lineinfo_left()
return transform (transform (e3, p2, fli, lli), self, fli, lli)
-----------------------------------------
-- Check for non-associative operators, and complain if applicable.
-----------------------------------------
elseif p2.assoc=="none" and p2.prec==prec then
M.parse_error (lx, "non-associative operator!")
-----------------------------------------
-- No infix operator suitable at that precedence
-----------------------------------------
else return false end
end --</expr.parse.handle_infix>
------------------------------------------------------
-- Look for a suffix sequence.
-- Return the result of suffix operator on [e],
-- or false if no operator was found.
------------------------------------------------------
local function handle_suffix (e)
-- FIXME bad fli, must take e.lineinfo.first
local p2_func, p2 = get_parser_info (self.suffix)
if not p2 then return false end
if not p2.prec or p2.prec>=prec then
--local fli = lx:lineinfo_right()
local fli = e.lineinfo.first
local op = p2_func(lx)
if not op then return false end
local lli = lx:lineinfo_left()
e = p2.builder (e, op)
e = transform (transform (e, p2, fli, lli), self, fli, lli)
return e
end
return false
end --</expr.parse.handle_suffix>
------------------------------------------------------
-- Parser body: read suffix and (infix+operand)
-- extensions as long as we're able to fetch more at
-- this precedence level.
------------------------------------------------------
local e = handle_prefix()
repeat
local x = handle_suffix (e); e = x or e
local y = handle_infix (e); e = y or e
until not (x or y)
-- No transform: it already happened in operators handling
return e
end --</expr.parse>
-------------------------------------------------------------------
-- Construction
-------------------------------------------------------------------
-- p[1] may be used as shorthand for the mandatory [primary] field.
if not p.primary then p.primary=p[1]; p[1]=nil end
-- Each operator table becomes a multisequence (and thereby gains :add).
for _, t in ipairs{ "primary", "prefix", "infix", "suffix" } do
if not p[t] then p[t] = { } end
if not M.is_parser(p[t]) then M.multisequence(p[t]) end
end
function p:add(...) return self.primary:add(...) end
return p
end --</expr>
-------------------------------------------------------------------------------
--
-- List parser generator
--
-------------------------------------------------------------------------------
-- In [p], the following fields can be provided in input:
--
-- * [builder]: takes list of subparser results, returns AST
-- * [transformers]: as usual
-- * [name]: as usual
--
-- * [terminators]: list of strings representing the keywords which
-- might mark the end of the list. When non-empty, the list is
-- allowed to be empty. A string is treated as a single-element
-- table, whose element is that string, e.g. ["do"] is the same as
-- [{"do"}].
--
-- * [separators]: list of strings representing the keywords which can
-- separate elements of the list. When non-empty, one of these
-- keyword has to be found between each element. Lack of a separator
-- indicates the end of the list. A string is treated as a
-- single-element table, whose element is that string, e.g. ["do"]
-- is the same as [{"do"}]. If [terminators] is empty/nil, then
-- [separators] has to be non-empty.
--
-- After creation, the following fields are added:
-- * [parse] the parsing function lexer->AST
-- * [kind] == "list"
--
-------------------------------------------------------------------------------
function M.list (p)
    M.make_parser ("list", p)

    -------------------------------------------------------------------
    -- Parsing method
    -------------------------------------------------------------------
    function p :parse (lx)

        ------------------------------------------------------
        -- Used to quickly check whether there's a terminator
        -- or a separator immediately ahead
        ------------------------------------------------------
        local function peek_is_in (keywords)
            return keywords and lx:is_keyword(lx:peek(), unpack(keywords)) end

        local x = { }
        local fli = lx :lineinfo_right()

        -- if there's a terminator to start with, don't bother trying
        -- (an empty list is only legal when terminators are declared)
        local is_empty_list = self.terminators and (peek_is_in (self.terminators) or lx:peek().tag=="Eof")

        if not is_empty_list then
            repeat
                local item = self.primary(lx)
                table.insert (x, item) -- read one element
            until
                -- There's a separator list specified, and next token isn't in it.
                -- Otherwise, consume it with [lx:next()] (note the condition's
                -- side effect: a matching separator is eaten here).
                self.separators and not(peek_is_in (self.separators) and lx:next()) or
                -- Terminator token ahead
                peek_is_in (self.terminators) or
                -- Last reason: end of file reached
                lx:peek().tag=="Eof"
        end

        local lli = lx:lineinfo_left()

        -- Apply the builder. It can be a string, or a callable value,
        -- or simply nothing.
        local b = self.builder
        if b then
            if type(b)=="string" then x.tag = b -- b is a string, use it as a tag
            elseif type(b)=="function" then x=b(x)
            else
                -- callable non-function builder, e.g. a table with __call
                local bmt = getmetatable(b)
                if bmt and bmt.__call then x=b(x) end
            end
        end
        return transform (x, self, fli, lli)
    end --</list.parse>

    -------------------------------------------------------------------
    -- Construction
    -------------------------------------------------------------------
    -- Normalize inputs: the subparser may come as p[1]; a lone string
    -- in terminators/separators becomes a one-element list; an empty
    -- list is treated as absent.
    if not p.primary then p.primary = p[1]; p[1] = nil end
    if type(p.terminators) == "string" then p.terminators = { p.terminators }
    elseif p.terminators and #p.terminators == 0 then p.terminators = nil end
    if type(p.separators) == "string" then p.separators = { p.separators }
    elseif p.separators and #p.separators == 0 then p.separators = nil end

    return p
end --</list>
-------------------------------------------------------------------------------
--
-- Keyword-conditioned parser generator
--
-------------------------------------------------------------------------------
--
-- Only apply a parser if a given keyword is found. The result of
-- [gg.onkeyword] parser is the result of the subparser (modulo
-- [transformers] applications).
--
-- lineinfo: the keyword is *not* included in the boundaries of the
-- resulting lineinfo. A review of all usages of gg.onkeyword() in the
-- implementation of metalua has shown that it was the appropriate choice
-- in every case.
--
-- Input fields:
--
-- * [name]: as usual
--
-- * [transformers]: as usual
--
-- * [peek]: if non-nil, the conditioning keyword is left in the lexeme
-- stream instead of being consumed.
--
-- * [primary]: the subparser.
--
-- * [keywords]: list of strings representing triggering keywords.
--
-- * Table-part entries can contain strings, and/or exactly one parser.
-- Strings are put in [keywords], and the parser is put in [primary].
--
-- After the call, the following fields will be set:
--
-- * [parse] the parsing method
-- * [kind] == "onkeyword"
-- * [primary]
-- * [keywords]
--
-------------------------------------------------------------------------------
function M.onkeyword (p)
    M.make_parser ("onkeyword", p)

    -------------------------------------------------------------------
    -- Parsing method
    -------------------------------------------------------------------
    function p :parse (lx)
        if lx :is_keyword (lx:peek(), unpack(self.keywords)) then
            local fli = lx:lineinfo_right()
            -- Consume the trigger keyword, unless 'peek' mode asks to
            -- leave it in the stream.
            if not self.peek then lx:next() end
            local content = self.primary (lx)
            local lli = lx:lineinfo_left()
            -- Prefer the subparser result's own lineinfo when present,
            -- so the trigger keyword stays out of the node's boundaries.
            local li = content.lineinfo or { }
            fli, lli = li.first or fli, li.last or lli
            return transform (content, p, fli, lli)
        else return false end
    end

    -------------------------------------------------------------------
    -- Construction
    -------------------------------------------------------------------
    -- Sort positional entries: strings are trigger keywords, and the
    -- single parser value becomes the primary subparser.
    if not p.keywords then p.keywords = { } end
    for _, x in ipairs(p) do
        if type(x)=="string" then table.insert (p.keywords, x)
        else assert (not p.primary and M.is_parser (x)); p.primary = x end
    end
    assert (next (p.keywords), "Missing trigger keyword in gg.onkeyword")
    assert (p.primary, 'no primary parser in gg.onkeyword')
    return p
end --</onkeyword>
-------------------------------------------------------------------------------
--
-- Optional keyword consummer pseudo-parser generator
--
-------------------------------------------------------------------------------
--
-- This doesn't return a real parser, just a function. That function parses
-- one of the keywords passed as parameters, and returns it. It returns
-- [false] if no matching keyword is found.
--
-- Notice that tokens returned by lexer already carry lineinfo, therefore
-- there's no need to add them, as done usually through transform() calls.
-------------------------------------------------------------------------------
function M.optkeyword (...)
    -- Build a pseudo-parser which consumes one of the given keywords
    -- when it is the next token, returning the keyword string; returns
    -- false otherwise. Keywords may be passed either as separate string
    -- arguments or as a single list.
    local keywords = {...}
    if type(keywords[1]) == "table" then
        assert(#keywords == 1)
        keywords = keywords[1]
    end
    for _, kw in ipairs(keywords) do
        assert(type(kw) == "string")
    end
    return function (lx)
        local found = lx:is_keyword(lx:peek(), unpack(keywords))
        if not found then return false end
        lx:next()
        return found
    end
end
-------------------------------------------------------------------------------
--
-- Run a parser with a special lexer
--
-------------------------------------------------------------------------------
--
-- This doesn't return a real parser, just a function.
-- First argument is the lexer class to be used with the parser,
-- 2nd is the parser itself.
-- The resulting parser returns whatever the argument parser does.
--
-------------------------------------------------------------------------------
function M.with_lexer(new_lexer, parser)

    -------------------------------------------------------------------
    -- Most gg functions take their parameters in a table, so it's
    -- better to silently accept when with_lexer{ } is called with
    -- its arguments in a list:
    -------------------------------------------------------------------
    if not parser and #new_lexer==2 and type(new_lexer[1])=='table' then
        return M.with_lexer(unpack(new_lexer))
    end

    -------------------------------------------------------------------
    -- Save the current lexer, switch it for the new one, run the parser,
    -- restore the previous lexer, even if the parser caused an error.
    -------------------------------------------------------------------
    return function (lx)
        -- The lexer class is the stream's metatable; swapping it changes
        -- which extraction methods the stream uses. :sync() is called on
        -- both sides so no token peeked under one lexer leaks into the other.
        local old_lexer = getmetatable(lx)
        lx:sync()
        setmetatable(lx, new_lexer)
        -- NOTE(review): plain pcall is used here rather than coxpcall;
        -- on Lua 5.1 a parser yielding across this boundary would fail --
        -- confirm whether that can happen in practice.
        local status, result = pcall(parser, lx)
        lx:sync()
        setmetatable(lx, old_lexer)
        if status then return result else error(result) end
    end
end
--------------------------------------------------------------------------------
--
-- Make sure a parser is used and returns successfully.
--
--------------------------------------------------------------------------------
function M.nonempty(primary)
    -- Wrap [primary] so that an empty result (a node with no array
    -- elements) raises a parse error instead of being returned silently.
    local p = M.make_parser('non-empty list', { primary = primary, name=primary.name })
    function p :parse (lx)
        local fli = lx:lineinfo_right()
        local content = self.primary (lx)
        local lli = lx:lineinfo_left()
        -- Prefer the subparser result's own boundaries when it has them.
        local li = content.lineinfo or { }
        fli, lli = li.first or fli, li.last or lli
        if #content == 0 then
            M.parse_error (lx, "`%s' must not be empty.", self.name or "list")
        else
            return transform (content, self, fli, lli)
        end
    end
    return p
end
-- Proxy objects for parsers that are not defined yet: a "future" looks
-- up the real parser in its module at call time, which lets mutually
-- recursive grammar fragments reference each other before definition.
local FUTURE_MT = { }
function FUTURE_MT:__tostring() return "<Proxy parser module>" end
function FUTURE_MT:__newindex(key, value) error "don't write in futures" end
function FUTURE_MT :__index (parser_name)
    -- Return a callable resolving __module[__path...][parser_name]
    -- lazily, on every invocation (rawget avoids re-triggering __index).
    return function(...)
        local p, m = rawget(self, '__path'), self.__module
        if p then for _, name in ipairs(p) do
            m=rawget(m, name)
            if not m then error ("Submodule '"..name.."' undefined") end
        end end
        local f = rawget(m, parser_name)
        if not f then error ("Parser '"..parser_name.."' undefined") end
        return f(...)
    end
end
function M.future(module, ...)
    -- Return a proxy (cf. FUTURE_MT) to the parsers in [module],
    -- optionally under the sub-table path given by the remaining
    -- string arguments.
    checks('table')
    local path = ... and {...}
    if path then for _, x in ipairs(path) do
        assert(type(x)=='string', "Bad future arg")
    end end
    local self = { __module = module,
                   __path   = path }
    return setmetatable(self, FUTURE_MT)
end

return M

View File

@ -0,0 +1,678 @@
-------------------------------------------------------------------------------
-- Copyright (c) 2006-2013 Fabien Fleutot and others.
--
-- All rights reserved.
--
-- This program and the accompanying materials are made available
-- under the terms of the Eclipse Public License v1.0 which
-- accompanies this distribution, and is available at
-- http://www.eclipse.org/legal/epl-v10.html
--
-- This program and the accompanying materials are also made available
-- under the terms of the MIT public license which accompanies this
-- distribution, and is available at http://www.lua.org/license.html
--
-- Contributors:
-- Fabien Fleutot - API and implementation
--
-------------------------------------------------------------------------------
require 'checks'

local M = { }

-- The lexer "class": [alpha] holds alphanumeric keywords, [sym] holds
-- multi-char symbol keywords grouped by their first character.
local lexer = { alpha={ }, sym={ } }
lexer.__index=lexer
lexer.__type='lexer.stream'
M.lexer = lexer

-- No-op debug hook; swap in printf for tracing.
local debugf = function() end
-- local debugf=printf

----------------------------------------------------------------------
-- Some locale settings produce bad results, e.g. French locale
-- expect float numbers to use commas instead of periods.
-- TODO: change number parser into something locale-independent,
-- locales are nasty.
----------------------------------------------------------------------
os.setlocale('C')

-- Registry of metatables for the lexer's object kinds
-- (position, lineinfo, token, comment, ...).
local MT = { }
M.metatables=MT

----------------------------------------------------------------------
-- Create a new metatable, for a new class of objects.
----------------------------------------------------------------------
local function new_metatable(name)
    -- Self-indexing metatable, tagged with __type = 'lexer.<name>'.
    local mt = { __type = 'lexer.'..name };
    mt.__index = mt
    MT[name] = mt
end
----------------------------------------------------------------------
-- Position: represent a point in a source file.
----------------------------------------------------------------------
new_metatable 'position'

-- Monotonic id shared by all positions; useful for debugging.
local position_idx=1

function M.new_position(line, column, offset, source)
    -- A position is a (line, column, byte-offset) point in [source].
    checks('number', 'number', 'number', 'string')
    local id = position_idx; position_idx = position_idx+1
    return setmetatable({line=line, column=column, offset=offset,
                         source=source, id=id}, MT.position)
end

function MT.position :__tostring()
    -- e.g. "<C|file|L3|C7|K42>"; the leading "C|" flags attached comments.
    return string.format("<%s%s|L%d|C%d|K%d>",
                         self.comments and "C|" or "",
                         self.source, self.line, self.column, self.offset)
end
----------------------------------------------------------------------
-- Position factory: convert offsets into line/column/offset positions.
----------------------------------------------------------------------
new_metatable 'position_factory'

function M.new_position_factory(src, src_name)
    -- assert(type(src)=='string')
    -- assert(type(src_name)=='string')
    -- line2offset[n] = byte offset of the first character of line n.
    local lines = { 1 }
    for offset in src :gmatch '\n()' do table.insert(lines, offset) end
    local max = #src+1
    table.insert(lines, max+1) -- +1 includes Eof
    return setmetatable({ src_name=src_name, line2offset=lines, max=max },
                        MT.position_factory)
end

function MT.position_factory :get_position (offset)
    -- Convert a byte [offset] into a full position record. A binary
    -- search over line2offset finds the enclosing line; the search is
    -- seeded from the previous call's result ([last_left]) since
    -- successive lookups are usually nearby.
    -- assert(type(offset)=='number')
    assert(offset<=self.max)
    local line2offset = self.line2offset
    local left = self.last_left or 1
    if offset<line2offset[left] then left=1 end
    local right = left+1
    if line2offset[right]<=offset then right = right+1 end
    if line2offset[right]<=offset then right = #line2offset end
    -- Loop invariant: line2offset[left] <= offset < line2offset[right].
    while true do
        -- print (" trying lines "..left.."/"..right..", offsets "..line2offset[left]..
        --        "/"..line2offset[right].." for offset "..offset)
        -- assert(line2offset[left]<=offset)
        -- assert(offset<line2offset[right])
        -- assert(left<right)
        if left+1==right then break end
        local middle = math.floor((left+right)/2)
        if line2offset[middle]<=offset then left=middle else right=middle end
    end
    -- assert(left+1==right)
    -- printf("found that offset %d is between %d and %d, hence on line %d",
    --        offset, line2offset[left], line2offset[right], left)
    local line = left
    local column = offset - line2offset[line] + 1
    self.last_left = left
    return M.new_position(line, column, offset, self.src_name)
end
----------------------------------------------------------------------
-- Lineinfo: represent a node's range in a source file;
-- embed information about prefix and suffix comments.
----------------------------------------------------------------------
new_metatable 'lineinfo'

function M.new_lineinfo(first, last)
    -- A lineinfo is a pair of positions delimiting a node's extent.
    checks('lexer.position', 'lexer.position')
    return setmetatable({first=first, last=last}, MT.lineinfo)
end

function MT.lineinfo :__tostring()
    -- Collapse identical first/last coordinates; render ranges as "a-b".
    local fli, lli = self.first, self.last
    local line = fli.line; if line~=lli.line then line =line ..'-'..lli.line end
    local column = fli.column; if column~=lli.column then column=column..'-'..lli.column end
    local offset = fli.offset; if offset~=lli.offset then offset=offset..'-'..lli.offset end
    return string.format("<%s%s|L%s|C%s|K%s%s>",
                         fli.comments and "C|" or "",
                         fli.source, line, column, offset,
                         lli.comments and "|C" or "")
end
----------------------------------------------------------------------
-- Token: atomic Lua language element, with a category, a content,
-- and some lineinfo relating it to its original source.
----------------------------------------------------------------------
new_metatable 'token'

function M.new_token(tag, content, lineinfo)
    --printf("TOKEN `%s{ %q, lineinfo = %s} boundaries %d, %d",
    --       tag, content, tostring(lineinfo), lineinfo.first.id, lineinfo.last.id)
    -- [content] goes into the array part, i.e. token[1].
    return setmetatable({tag=tag, lineinfo=lineinfo, content}, MT.token)
end

function MT.token :__tostring()
    --return string.format("`%s{ %q, %s }", self.tag, self[1], tostring(self.lineinfo))
    return string.format("`%s %q", self.tag, self[1])
end
----------------------------------------------------------------------
-- Comment: series of comment blocks with associated lineinfo.
-- To be attached to the tokens just before and just after them.
----------------------------------------------------------------------
new_metatable 'comment'

function M.new_comment(lines)
    -- [lines] is a non-empty list of comment lines (cf. new_comment_line);
    -- the resulting block spans from the first line's start to the last
    -- line's end, with the lines themselves kept in the array part.
    local first = lines[1].lineinfo.first
    local last = lines[#lines].lineinfo.last
    local lineinfo = M.new_lineinfo(first, last)
    return setmetatable({lineinfo=lineinfo, unpack(lines)}, MT.comment)
end
function MT.comment :text()
    -- Reassemble the comment block as plain text: each stored comment
    -- line is emitted, preceded by as many newlines as there were
    -- source lines between it and the previous comment line.
    local last_line = self[1].lineinfo.last.line
    local acc = { }
    for i, line in ipairs(self) do
        local nreturns = line.lineinfo.first.line - last_line
        table.insert(acc, ("\n"):rep(nreturns))
        table.insert(acc, line[1])
        -- Advance the reference line so the next gap is measured against
        -- the *previous* comment line. (The original never updated
        -- last_line, so from the third element on the gap was computed
        -- relative to the first line, inserting too many blank lines.)
        last_line = line.lineinfo.last.line
    end
    return table.concat(acc)
end
function M.new_comment_line(text, lineinfo, nequals)
    checks('string', 'lexer.lineinfo', '?number')
    -- [nequals]: number of '=' signs in a long comment's brackets;
    -- nil for short "--" comments. Stored as positions [1] and [2].
    return { lineinfo = lineinfo, text, nequals }
end
----------------------------------------------------------------------
-- Patterns used by [lexer :extract] to decompose the raw string into
-- correctly tagged tokens.
----------------------------------------------------------------------
-- Note: the trailing "()" in each pattern captures the byte offset
-- just after the matched lexeme, which extractors use to advance self.i.
lexer.patterns = {
    spaces = "^[ \r\n\t]*()",
    short_comment = "^%-%-([^\n]*)\n?()",
    --final_short_comment = "^%-%-([^\n]*)()$",
    long_comment = "^%-%-%[(=*)%[\n?(.-)%]%1%]()",
    long_string = "^%[(=*)%[\n?(.-)%]%1%]()",
    number_longint = "^%d+[uU]?[lL][lL]()",
    number_longint_hex = "^%x+[uU]?[lL][lL]()",
    number_mantissa = { "^%d+%.?%d*()", "^%d*%.%d+()" },
    number_mantissa_hex = { "^%x+%.?%x*()", "^%x*%.%x+()" }, --Lua5.1 and Lua5.2
    number_exponent = "^[eE][%+%-]?%d+()",
    number_exponent_hex = "^[pP][%+%-]?%d+()", --Lua5.2
    number_hex = "^0[xX]()",
    number_imaginary = "^[iI]()",
    word = "^([%a_][%w_]*)()",
}
----------------------------------------------------------------------
-- unescape a whole string, applying [unesc_digits] and
-- [unesc_letter] as many times as required.
----------------------------------------------------------------------
local function unescape_string (s)

    -- Turn the digits of an escape sequence into the corresponding
    -- character, e.g. [unesc_digits("123") == string.char(123)].
    -- [backslashes] is the run of backslashes preceding the digits;
    -- only an odd-length run actually escapes them.
    local function unesc_digits (backslashes, digits)
        if #backslashes%2==0 then
            -- Even number of backslashes, they escape each other, not the digits.
            -- Return them so that unesc_letter() can treat them
            return backslashes..digits
        else
            -- Remove the odd backslash, which escapes the number sequence.
            -- The rest will be returned and parsed by unesc_letter()
            backslashes = backslashes :sub (1,-2)
        end
        local k, j, i = digits :reverse() :byte(1, 3)
        local z = string.byte "0"
        local code = (k or z) + 10*(j or z) + 100*(i or z) - 111*z
        if code > 255 then
            error ("Illegal escape sequence '\\"..digits..
                   "' in string: ASCII codes must be in [0..255]")
        end
        local c = string.char (code)
        if c == '\\' then c = '\\\\' end -- parsed by unesc_letter (test: "\092b" --> "\\b")
        return backslashes..c
    end

    -- Turn hex digits of escape sequence into char.
    local function unesc_hex(backslashes, digits)
        if #backslashes%2==0 then
            -- Even run: backslashes escape each other, leave "\\x.." alone.
            return backslashes..'x'..digits
        else
            backslashes = backslashes :sub (1,-2)
        end
        local c = string.char(tonumber(digits,16))
        if c == '\\' then c = '\\\\' end -- parsed by unesc_letter (test: "\x5cb" --> "\\b")
        return backslashes..c
    end

    -- Handle Lua 5.2 \z sequences: an escaped 'z' swallows itself and
    -- the following whitespace run ([more]).
    local function unesc_z(backslashes, more)
        if #backslashes%2==0 then
            return backslashes..more
        else
            return backslashes :sub (1,-2)
        end
    end

    -- Take a letter [x], and returns the character represented by the
    -- sequence ['\\'..x], e.g. [unesc_letter "n" == "\n"].
    local function unesc_letter(x)
        local t = {
            a = "\a", b = "\b", f = "\f",
            n = "\n", r = "\r", t = "\t", v = "\v",
            ["\\"] = "\\", ["'"] = "'", ['"'] = '"', ["\n"] = "\n" }
        return t[x] or x
    end

    -- The passes must run in this order: z/digit/hex escapes are
    -- rewritten first (any backslash they *produce* is re-encoded as
    -- "\\\\"), then the final pass resolves single-letter escapes.
    s = s: gsub ("(\\+)(z%s*)", unesc_z) -- Lua 5.2
    s = s: gsub ("(\\+)([0-9][0-9]?[0-9]?)", unesc_digits)
    s = s: gsub ("(\\+)x([0-9a-fA-F][0-9a-fA-F])", unesc_hex) -- Lua 5.2
    s = s: gsub ("\\(%D)",unesc_letter)
    return s
end

-- Extraction methods, tried in this order by :extract() until one matches.
lexer.extractors = {
    "extract_long_comment", "extract_short_comment",
    "extract_short_string", "extract_word", "extract_number",
    "extract_long_string", "extract_symbol" }
----------------------------------------------------------------------
-- Really extract next token from the raw string
-- (and update the index).
-- loc: offset of the position just after spaces and comments
-- previous_i: offset in src before extraction began
----------------------------------------------------------------------
function lexer :extract ()
    -- Pull the next non-comment token out of the source, advancing
    -- self.i. Comments met on the way are accumulated and attached to
    -- the token's first position (and to the previous token's last
    -- position).
    local attached_comments = { }
    local function gen_token(...)
        local token = M.new_token(...)
        if #attached_comments>0 then -- attach previous comments to token
            local comments = M.new_comment(attached_comments)
            token.lineinfo.first.comments = comments
            if self.lineinfo_last_extracted then
                self.lineinfo_last_extracted.comments = comments
            end
            attached_comments = { }
        end
        -- Doubly link the boundary positions of consecutive tokens.
        token.lineinfo.first.facing = self.lineinfo_last_extracted
        self.lineinfo_last_extracted.facing = assert(token.lineinfo.first)
        self.lineinfo_last_extracted = assert(token.lineinfo.last)
        return token
    end
    while true do -- loop until a non-comment token is found

        -- skip whitespaces
        self.i = self.src:match (self.patterns.spaces, self.i)
        if self.i>#self.src then
            -- Past the end: synthesize an Eof token (self.i stays put,
            -- so further calls keep returning fresh Eof tokens).
            local fli = self.posfact :get_position (#self.src+1)
            local lli = self.posfact :get_position (#self.src+1) -- ok?
            local tok = gen_token("Eof", "eof", M.new_lineinfo(fli, lli))
            tok.lineinfo.last.facing = lli
            return tok
        end
        local i_first = self.i -- loc = position after whitespaces

        -- try every extractor until a token is found
        for _, extractor in ipairs(self.extractors) do
            local tag, content, xtra = self [extractor] (self)
            if tag then
                local fli = self.posfact :get_position (i_first)
                local lli = self.posfact :get_position (self.i-1)
                local lineinfo = M.new_lineinfo(fli, lli)
                if tag=='Comment' then
                    -- Merge consecutive short comments on adjacent lines
                    -- into one comment line.
                    local prev_comment = attached_comments[#attached_comments]
                    if not xtra -- new comment is short
                    and prev_comment and not prev_comment[2] -- prev comment is short
                    and prev_comment.lineinfo.last.line+1==fli.line then -- adjascent lines
                        -- concat with previous comment
                        prev_comment[1] = prev_comment[1].."\n"..content -- TODO quadratic, BAD!
                        prev_comment.lineinfo.last = lli
                    else -- accumulate comment
                        local comment = M.new_comment_line(content, lineinfo, xtra)
                        table.insert(attached_comments, comment)
                    end
                    break -- back to skipping spaces
                else -- not a comment: real token, then
                    return gen_token(tag, content, lineinfo)
                end -- if token is a comment
            end -- if token found
        end -- for each extractor
    end -- while token is a comment
end -- :extract()
----------------------------------------------------------------------
-- Extract a short comment.
----------------------------------------------------------------------
function lexer :extract_short_comment()
    -- "--" up to end-of-line; the nil third result marks it as short.
    -- TODO: handle final_short_comment
    local text, after = self.src:match(self.patterns.short_comment, self.i)
    if not text then return end
    self.i = after
    return 'Comment', text, nil
end

----------------------------------------------------------------------
-- Extract a long comment.
----------------------------------------------------------------------
function lexer :extract_long_comment()
    -- "--[==[ ... ]==]"; third result is the number of '=' signs.
    local eqs, body, after = self.src:match(self.patterns.long_comment, self.i)
    if not after then return end
    self.i = after
    return "Comment", body, #eqs
end
----------------------------------------------------------------------
-- Extract a '...' or "..." short string.
----------------------------------------------------------------------
function lexer :extract_short_string()
    -- Extract a single- or double-quoted string literal, honoring
    -- backslash escapes and Lua 5.2's "\z" whitespace-skipping escape.
    -- Returns (nil, msg) when the string hits a newline or EOF unclosed.
    local k = self.src :sub (self.i,self.i) -- first char
    if k~=[[']] and k~=[["]] then return end -- no match'
    local i = self.i + 1
    local j = i
    while true do
        -- Jump to the next character of interest: backslash, CR, LF,
        -- or the closing quote; y is the character following it.
        local x,y; x, j, y = self.src :match ("([\\\r\n"..k.."])()(.?)", j) -- next interesting char
        if x == '\\' then
            if y == 'z' then -- Lua 5.2 \z
                j = self.src :match ("^%s*()", j+1)
            else
                j=j+1 -- escaped char
            end
        elseif x == k then break -- end of string
        else
            -- x is nil (EOF) or a raw newline: the literal is unclosed.
            assert (not x or x=='\r' or x=='\n')
            return nil, 'Unterminated string'
        end
    end
    self.i = j

    return 'String', unescape_string (self.src :sub (i,j-2))
end
----------------------------------------------------------------------
-- Extract Id or Keyword.
----------------------------------------------------------------------
function lexer :extract_word()
    -- Identifier or keyword: [%a_][%w_]*. Words registered through
    -- :add() (stored in self.alpha) are tagged 'Keyword', others 'Id'.
    local word, after = self.src:match(self.patterns.word, self.i)
    if not word then return end
    self.i = after
    if self.alpha[word] then return 'Keyword', word end
    return 'Id', word
end
----------------------------------------------------------------------
-- Extract Number.
----------------------------------------------------------------------
function lexer :extract_number()
    -- Extract a numeric literal: decimal or hex, with optional
    -- exponent, LuaJIT-style "LL"/"ULL" long-integer suffix, or "i"
    -- imaginary suffix.
    local patt = self.patterns
    local s = self.src
    -- A "0x"/"0X" prefix switches all sub-patterns to their hex variants.
    local j = s:match(patt.number_hex, self.i)
    local hex = j ~= nil
    local longint = hex and patt.number_longint_hex or patt.number_longint
    local mantissa1 = hex and patt.number_mantissa_hex[1] or patt.number_mantissa[1]
    local mantissa2 = hex and patt.number_mantissa_hex[2] or patt.number_mantissa[2]
    local exponent = hex and patt.number_exponent_hex or patt.number_exponent
    if not hex then j = self.i end

    -- Long integers consume the whole lexeme; otherwise read the
    -- mantissa, then the optional exponent and imaginary suffix.
    local t = s:match(longint, j)
    if t then
        j = t
    else
        j = s:match(mantissa1, j) or s:match(mantissa2, j)
        if not j then return end
        j = s:match(exponent, j) or j
        j = s:match(patt.number_imaginary, j) or j
    end
    local str = self.src:sub (self.i, j-1)
    self.i = j
    -- Number found, interpret with tonumber() and return it
    -- return str as the fallback when processing formats not supported by the current interpreter
    return 'Number', (tonumber (str) or str)
end
----------------------------------------------------------------------
-- Extract long string.
----------------------------------------------------------------------
function lexer :extract_long_string()
    -- "[==[ ... ]==]" literal; the '=' count is matched but discarded.
    local _, body, after = self.src:match(self.patterns.long_string, self.i)
    if not after then return end
    self.i = after
    return 'String', body
end
----------------------------------------------------------------------
-- Extract symbol.
----------------------------------------------------------------------
function lexer :extract_symbol()
    -- Extract a punctuation keyword. A lone punctuation char always
    -- succeeds; multi-char symbols sharing its first char are looked
    -- up in self.sym[k].
    local k = self.src:sub (self.i,self.i)
    local symk = self.sym [k] -- symbols starting with `k`
    if not symk then
        self.i = self.i + 1
        return 'Keyword', k
    end
    -- NOTE(review): candidates are tried in registration order, not by
    -- decreasing length; this assumes no registered symbol is a prefix
    -- of a longer one with the same head, or that longer symbols are
    -- registered first -- TODO confirm.
    for _, sym in pairs (symk) do
        if sym == self.src:sub (self.i, self.i + #sym - 1) then
            self.i = self.i + #sym
            return 'Keyword', sym
        end
    end
    -- No multi-char candidate matched: fall back to the single char.
    self.i = self.i+1
    return 'Keyword', k
end
----------------------------------------------------------------------
-- Add a keyword to the list of keywords recognized by the lexer.
----------------------------------------------------------------------
function lexer :add (w, ...)
    -- Register keyword(s): an identifier-shaped word goes into
    -- self.alpha; a multi-char punctuation symbol is bucketed in
    -- self.sym by its first character; a single punctuation char is
    -- already handled by extract_symbol() and is ignored.
    assert(not ..., "lexer :add() takes only one arg, although possibly a table")
    if type(w) == "table" then
        for _, kw in ipairs(w) do self:add(kw) end
        return
    end
    if w:match(self.patterns.word .. "$") then
        self.alpha[w] = true
    elseif w:match "^%p%p+$" then
        local head = w:sub(1, 1)
        local bucket = self.sym[head]
        if not bucket then
            bucket = { }
            self.sym[head] = bucket
        end
        table.insert(bucket, w)
    elseif w:match "^%p$" then
        return
    else
        error "Invalid keyword"
    end
end
----------------------------------------------------------------------
-- Return the [n]th next token, without consuming it.
-- [n] defaults to 1. If it goes pass the end of the stream, an EOF
-- token is returned.
----------------------------------------------------------------------
function lexer :peek (n)
    -- Look ahead [n] tokens (default 1) without consuming anything;
    -- missing tokens are extracted into the self.peeked buffer. Past
    -- end-of-stream this buffers Eof tokens.
    n = n or 1
    local buffered = #self.peeked
    for i = buffered + 1, n do
        self.peeked[i] = self:extract()
    end
    return self.peeked[n]
end
----------------------------------------------------------------------
-- Return the [n]th next token, removing it as well as the 0..n-1
-- previous tokens. [n] defaults to 1. If it goes pass the end of the
-- stream, an EOF token is returned.
----------------------------------------------------------------------
function lexer :next (n)
    -- Consume and return the [n]th token (default 1), discarding the
    -- n-1 tokens before it. Past end-of-stream, an Eof token is returned.
    n = n or 1
    self:peek(n) -- ensure the first n tokens are buffered
    local tok
    for _ = 1, n do
        tok = table.remove(self.peeked, 1)
        -- TODO: is this used anywhere? I think not; lineinfo.last may be nil.
        --self.lastline = tok.lineinfo.last.line
    end
    self.lineinfo_last_consumed = tok.lineinfo.last
    return tok
end
----------------------------------------------------------------------
-- Returns an object which saves the stream's current state.
----------------------------------------------------------------------
-- FIXME there are more fields than that to save
function lexer :save () return { self.i; {unpack(self.peeked) } } end

----------------------------------------------------------------------
-- Restore the stream's state, as saved by method [save].
----------------------------------------------------------------------
-- FIXME there are more fields than that to restore
function lexer :restore (s) self.i=s[1]; self.peeked=s[2] end

----------------------------------------------------------------------
-- Resynchronize: cancel any token in self.peeked, by emptying the
-- list and resetting the indexes
----------------------------------------------------------------------
function lexer :sync()
    local p1 = self.peeked[1]
    if p1 then
        -- Rewind self.i to the start of the first peeked token,
        -- including any comments attached before it, so those bytes
        -- will be re-lexed.
        local li_first = p1.lineinfo.first
        if li_first.comments then li_first=li_first.comments.lineinfo.first end
        self.i = li_first.offset
        self.column_offset = self.i - li_first.column
        self.peeked = { }
        self.attached_comments = p1.lineinfo.first.comments or { }
    end
end
----------------------------------------------------------------------
-- Take the source and offset of an old lexer.
----------------------------------------------------------------------
function lexer :takeover(old)
    -- Adopt the reading state of lexer [old]: both streams are
    -- resync'ed first, then the source, offset, pending comments and
    -- position factory are carried over.
    self:sync()
    old:sync()
    self.i                 = old.i
    self.src               = old.src
    self.attached_comments = old.attached_comments
    self.posfact           = old.posfact
    return self
end
----------------------------------------------------------------------
-- Return the current position in the sources. This position is between
-- two tokens, and can be within a space / comment area, and therefore
-- have a non-null width. :lineinfo_left() returns the beginning of the
-- separation area, :lineinfo_right() returns the end of that area.
--
-- ____ last consummed token ____ first unconsummed token
-- / /
-- XXXXX <spaces and comments> YYYYY
-- \____ \____
-- :lineinfo_left() :lineinfo_right()
----------------------------------------------------------------------
function lexer :lineinfo_right()
    -- Start position of the first not-yet-consumed token.
    return self :peek(1).lineinfo.first
end

function lexer :lineinfo_left()
    -- End position of the last consumed token.
    return self.lineinfo_last_consumed
end
----------------------------------------------------------------------
-- Create a new lexstream.
----------------------------------------------------------------------
function lexer :newstream (src_or_stream, name)
    -- Create a token stream, either over a source string or by taking
    -- over another stream's state. [name] labels positions (default "?").
    name = name or "?"
    if type(src_or_stream)=='table' then -- it's a stream
        return setmetatable ({ }, self) :takeover (src_or_stream)
    elseif type(src_or_stream)=='string' then -- it's a source string
        local src = src_or_stream
        local pos1 = M.new_position(1, 1, 1, name)
        local stream = {
            src_name = name; -- Name of the file
            src = src; -- The source, as a single string
            peeked = { }; -- Already peeked, but not discarded yet, tokens
            i = 1; -- Character offset in src
            attached_comments = { },-- comments accumulator
            lineinfo_last_extracted = pos1,
            lineinfo_last_consumed = pos1,
            posfact = M.new_position_factory (src_or_stream, name)
        }
        setmetatable (stream, self)

        -- Skip initial sharp-bang for Unix scripts
        -- FIXME: redundant with mlp.chunk()
        -- NOTE(review): when the "#!" line has no trailing newline, i is
        -- left at #src rather than past it, so the final character would
        -- still be lexed -- confirm whether that case can occur.
        if src and src :match "^#!" then
            local endofline = src :find "\n"
            stream.i = endofline and (endofline + 1) or #src
        end
        return stream
    else
        assert(false, ":newstream() takes a source string or a stream, not a "..
               type(src_or_stream))
    end
end
----------------------------------------------------------------------
-- If there's no ... args, return the token a (whose truth value is
-- true) if it's a `Keyword{ }, or nil. If there are ... args, they
-- have to be strings. if the token a is a keyword, and it's content
-- is one of the ... args, then returns it (it's truth value is
-- true). If no a keyword or not in ..., return nil.
----------------------------------------------------------------------
function lexer :is_keyword (a, ...)
    -- If [a] is not a `Keyword token, return false. With no extra
    -- args, return its content; otherwise return the content when it
    -- equals one of the given strings, or false.
    if not a or a.tag ~= "Keyword" then return false end
    local content = a[1]
    local nwords = select('#', ...)
    if nwords == 0 then return content end
    for i = 1, nwords do
        if select(i, ...) == content then return content end
    end
    return false
end
----------------------------------------------------------------------
-- Cause an error if the next token isn't a keyword whose content
-- is listed among ... args (which have to be strings).
----------------------------------------------------------------------
--- Consume the next token and demand that it be a keyword listed in `...`
--- (or any keyword when no list is given). Returns the matched word,
--- raises a descriptive error otherwise.
function lexer :check (...)
   local words = {...}
   local a = self :next()
   local function err ()
      error ("Got " .. tostring (a) ..
             ", expected one of these keywords : '" ..
             table.concat (words,"', '") .. "'")
   end
   if a and a.tag == "Keyword" then
      if #words == 0 then return a[1] end
      for idx = 1, #words do
         if words[idx] == a[1] then return words[idx] end
      end
   else
      err ()
   end
   err ()
end
----------------------------------------------------------------------
--
----------------------------------------------------------------------
----------------------------------------------------------------------
-- Duplicate this lexer: the keyword set (`alpha`) and the symbol table
-- (`sym`) are copied one level deep, so the clone can register new
-- keywords/symbols without affecting the original lexer.
----------------------------------------------------------------------
function lexer :clone()
   local alpha_clone, sym_clone = { }, { }
   for word in pairs(self.alpha) do alpha_clone[word]=true end
   for letter, list in pairs(self.sym) do sym_clone[letter] = { unpack(list) } end
   local clone = { alpha=alpha_clone, sym=sym_clone }
   setmetatable(clone, self)
   -- Streams created from the clone look methods up in the clone first.
   clone.__index = clone
   return clone
end
----------------------------------------------------------------------
-- Cancel everything left in a lexer, all subsequent attempts at
-- `:peek()` or `:next()` will return `Eof`.
----------------------------------------------------------------------
----------------------------------------------------------------------
-- Exhaust the stream: move the cursor past the end of the source and
-- drop any pending tokens/comments, so every subsequent `:peek()` or
-- `:next()` yields `Eof`.
----------------------------------------------------------------------
function lexer :kill()
   self.i = #self.src+1
   self.peeked = { }
   self.attached_comments = { }
   -- NOTE(review): `:newstream()` tracks `lineinfo_last_extracted` /
   -- `lineinfo_last_consumed`; the field written here is
   -- `lineinfo_last` — confirm this is intentional.
   self.lineinfo_last = self.posfact :get_position (#self.src+1)
end

return M

View File

@ -0,0 +1,295 @@
-------------------------------------------------------------------------------
-- Copyright (c) 2006-2013 Fabien Fleutot and others.
--
-- All rights reserved.
--
-- This program and the accompanying materials are made available
-- under the terms of the Eclipse Public License v1.0 which
-- accompanies this distribution, and is available at
-- http://www.eclipse.org/legal/epl-v10.html
--
-- This program and the accompanying materials are also made available
-- under the terms of the MIT public license which accompanies this
-- distribution, and is available at http://www.lua.org/license.html
--
-- Contributors:
-- Fabien Fleutot - API and implementation
--
----------------------------------------------------------------------
----------------------------------------------------------------------
----------------------------------------------------------------------
--
-- Lua objects pretty-printer
--
----------------------------------------------------------------------
----------------------------------------------------------------------
-- Module table for the pretty-printer.
local M = { }

-- Default configuration; caller-supplied cfg tables may override any field.
M.DEFAULT_CFG = {
    hide_hash = false; -- Print the non-array part of tables?
    metalua_tag = true; -- Use Metalua's backtick syntax sugar?
    fix_indent = nil; -- If a number, number of indentation spaces;
    -- If false, indent to the previous brace.
    line_max = nil; -- If a number, tries to avoid making lines with
    -- more than this number of chars.
    initial_indent = 0; -- If a number, starts at this level of indentation
    keywords = { }; -- Set of keywords which must not use Lua's field
    -- shortcuts {["foo"]=...} -> {foo=...}
}
-- A key can use the `{foo=...}` field shortcut iff it is a string shaped
-- like a Lua identifier and not listed in `cfg.keywords`.
local function valid_id(cfg, x)
    if type(x) == "string"
       and x:match "^[a-zA-Z_][a-zA-Z0-9_]*$" ~= nil
       and not (cfg.keywords and cfg.keywords[x]) then
        return true
    end
    return false
end
-- Weak-keyed cache: maps a value to the string its `__tostring`
-- metamethod produced, or to `false` when it has none.
local __tostring_cache = setmetatable({ }, {__mode='k'})

-- Retrieve the string produced by `__tostring` metamethod if present,
-- return `false` otherwise. Cached in `__tostring_cache`.
local function __tostring(x)
    local the_string = __tostring_cache[x]
    if the_string~=nil then return the_string end
    local mt = getmetatable(x)
    if mt then
        local __tostring = mt.__tostring
        if __tostring then
            the_string = __tostring(x)
            __tostring_cache[x] = the_string
            return the_string
        end
    end
    if x~=nil then __tostring_cache[x] = false end -- nil is an illegal key
    return false
end
local xlen -- mutually recursive with `xlen_type`

-- Weak-keyed memoization of computed display lengths (see `xlen`).
local xlen_cache = setmetatable({ }, {__mode='k'})

-- Helpers for the `xlen` function: one sizing function per Lua type.
local xlen_type = {
    ["nil"] = function ( ) return 3 end;            -- #"nil"
    number = function (x) return #tostring(x) end;
    boolean = function (x) return x and 4 or 5 end; -- #"true" / #"false"
    string = function (x) return #string.format("%q",x) end;
}
-- Estimate the number of characters needed to print table `adt` on a
-- single line under configuration `cfg`. `nested` marks tables on the
-- current recursion path so cycles fall back to plain `tostring`.
function xlen_type.table (adt, cfg, nested)
    local custom_string = __tostring(adt)
    if custom_string then return #custom_string end
    -- Circular referenced objects are printed with the plain
    -- `tostring` function in nested positions.
    if nested [adt] then return #tostring(adt) end
    nested [adt] = true
    local has_tag = cfg.metalua_tag and valid_id(cfg, adt.tag)
    local alen = #adt
    local has_arr = alen>0
    local has_hash = false
    local x = 0
    if not cfg.hide_hash then
        -- first pass: count hash-part
        for k, v in pairs(adt) do
            if k=="tag" and has_tag then
                -- this is the tag -> do nothing!
            elseif type(k)=="number" and k<=alen and math.fmod(k,1)==0 and k>0 then
                -- array-part pair -> do nothing!
            else
                has_hash = true
                if valid_id(cfg, k) then x=x+#k
                else x = x + xlen (k, cfg, nested) + 2 end -- count surrounding brackets
                x = x + xlen (v, cfg, nested) + 5 -- count " = " and ", "
            end
        end
    end
    -- BUGFIX: `cfg` used to be omitted in this call, so array elements
    -- were measured with `nested` standing in for the configuration.
    for i = 1, alen do x = x + xlen (adt[i], cfg, nested) + 2 end -- count ", "
    nested[adt] = false -- No more nested calls
    if not (has_tag or has_arr or has_hash) then return 3 end
    if has_tag then x=x+#adt.tag+1 end
    if not (has_arr or has_hash) then return x end
    if not has_hash and alen==1 and type(adt[1])~="table" then
        return x-2 -- substract extraneous ", "
    end
    return x+2 -- count "{ " and " }", substract extraneous ", "
end
-- Compute the number of chars it would require to display the table
-- on a single line. Helps to decide whether some carriage returns are
-- required. Since the size of each sub-table is required many times,
-- it's cached in [xlen_cache].
-- Compute the single-line display length of `x` (see comment above).
-- Dispatches on type via `xlen_type`; results are memoized in
-- `xlen_cache`. Returns 0 immediately when no `line_max` is configured.
xlen = function (x, cfg, nested)
    -- no need to compute length for 1-line prints
    if not cfg.line_max then return 0 end
    nested = nested or { }
    if x==nil then return #"nil" end
    local len = xlen_cache[x]
    if len then return len end
    local f = xlen_type[type(x)]
    if not f then return #tostring(x) end
    len = f (x, cfg, nested)
    xlen_cache[x] = len
    return len
end
-- If appending `len` more characters would overflow `cfg.line_max`,
-- emit a line break and re-indent printer `p` to its current
-- indentation level (no-op when already at or left of the indent).
local function consider_newline(p, len)
    local max = p.cfg.line_max
    if max and p.current_offset + len > max and p.indent < p.current_offset then
        p:acc "\n"
        p:acc ((" "):rep(p.indent))
        p.current_offset = p.indent
    end
end
local acc_value

-- Printer functions for scalar types; each appends the rendering of
-- `adt` to printer `p`. For strings, `%q`'s escaped literal newlines
-- ("\<newline>") are normalized to "\n".
local acc_type = {
    ["nil"] = function(p) p:acc("nil") end;
    number = function(p, adt) p:acc (tostring (adt)) end;
    string = function(p, adt) p:acc ((string.format ("%q", adt):gsub("\\\n", "\\n"))) end;
    boolean = function(p, adt) p:acc (adt and "true" or "false") end }
-- Indentation:
-- * if `cfg.fix_indent` is set to a number:
-- * add this number of space for each level of depth
-- * return to the line as soon as it flushes things further left
-- * if not, tabulate to one space after the opening brace.
-- * as a result, it never saves right-space to return before first element
-- Append the rendering of table `adt` to printer `p`: optional metalua
-- tag, hash-part, then array-part, with line wrapping and indentation
-- (policy described in the comment above).
function acc_type.table(p, adt)
    -- Cycle guard: nested occurrences on the current path are printed
    -- with plain `tostring`.
    if p.nested[adt] then p:acc(tostring(adt)); return end
    p.nested[adt] = true
    local has_tag = p.cfg.metalua_tag and valid_id(p.cfg, adt.tag)
    local alen = #adt
    local has_arr = alen>0
    local has_hash = false
    local previous_indent = p.indent
    if has_tag then p:acc("`"); p:acc(adt.tag) end
    -- Advance the indent level per cfg.fix_indent (or align to the
    -- column following the opening brace).
    local function indent(p)
        if not p.cfg.fix_indent then p.indent = p.current_offset
        else p.indent = p.indent + p.cfg.fix_indent end
    end
    -- First pass: handle hash-part
    if not p.cfg.hide_hash then
        for k, v in pairs(adt) do
            if has_tag and k=='tag' then -- pass the 'tag' field
            elseif type(k)=="number" and k<=alen and k>0 and math.fmod(k,1)==0 then
                -- pass array-part keys (consecutive ints less than `#adt`)
            else -- hash-part keys
                if has_hash then p:acc ", " else -- 1st hash-part pair ever found
                    p:acc "{ "; indent(p)
                end
                -- Determine whether a newline is required
                local is_id, expected_len=valid_id(p.cfg, k)
                if is_id then expected_len=#k+xlen(v, p.cfg, p.nested)+#" = , "
                else expected_len = xlen(k, p.cfg, p.nested)+xlen(v, p.cfg, p.nested)+#"[] = , " end
                consider_newline(p, expected_len)
                -- Print the key
                if is_id then p:acc(k); p:acc " = " else
                    p:acc "["; acc_value (p, k); p:acc "] = "
                end
                acc_value (p, v) -- Print the value
                has_hash = true
            end
        end
    end
    -- Now we know whether there's a hash-part, an array-part, and a tag.
    -- Tag and hash-part are already printed if they're present.
    if not has_tag and not has_hash and not has_arr then p:acc "{ }";
    elseif has_tag and not has_hash and not has_arr then -- nothing, tag already in acc
    else
        assert (has_hash or has_arr) -- special case { } already handled
        local no_brace = false
        if has_hash and has_arr then p:acc ", "
        elseif has_tag and not has_hash and alen==1 and type(adt[1])~="table" then
            -- No brace required; don't print "{", remember not to print "}"
            p:acc (" "); acc_value (p, adt[1]) -- indent= indent+(cfg.fix_indent or 0))
            no_brace = true
        elseif not has_hash then
            -- Braces required, but not opened by hash-part handler yet
            p:acc "{ "; indent(p)
        end
        -- 2nd pass: array-part
        if not no_brace and has_arr then
            local expected_len = xlen(adt[1], p.cfg, p.nested)
            consider_newline(p, expected_len)
            acc_value(p, adt[1]) -- indent+(cfg.fix_indent or 0)
            for i=2, alen do
                p:acc ", ";
                consider_newline(p, xlen(adt[i], p.cfg, p.nested))
                acc_value (p, adt[i]) --indent+(cfg.fix_indent or 0)
            end
        end
        if not no_brace then p:acc " }" end
    end
    p.nested[adt] = false -- No more nested calls
    p.indent = previous_indent
end
-- Append the rendering of an arbitrary value `v` to printer `p`:
-- honor a `__tostring` metamethod first, then dispatch on type,
-- falling back to plain `tostring` for unhandled types.
function acc_value(p, v)
    local custom_string = __tostring(v)
    if custom_string then
        p:acc(custom_string)
        return
    end
    local handler = acc_type[type(v)]
    if handler then
        handler(p, v)
    else
        p:acc(tostring(v))
    end
end
-- FIXME: new_indent seems to be always nil — investigate and remove if dead.
-- FIXME: accumulator function should be configurable,
-- so that print() doesn't need to bufferize the whole string
-- before starting to print.
-- Render `t` as a string, following configuration table `cfg`
-- (defaults to `M.DEFAULT_CFG`).
function M.tostring(t, cfg)
    cfg = cfg or M.DEFAULT_CFG or { }
    -- Ad-hoc printer object: `acc` buffers chunks and tracks the
    -- current column for line-wrapping decisions.
    local p = {
        cfg = cfg;
        indent = 0;
        current_offset = cfg.initial_indent or 0;
        buffer = { };
        nested = { };
        acc = function(self, str)
            table.insert(self.buffer, str)
            self.current_offset = self.current_offset + #str
        end;
    }
    acc_value(p, t)
    return table.concat(p.buffer)
end
-- Pretty-print to stdout; same arguments as `M.tostring`.
function M.print(...) return print(M.tostring(...)) end

-- `string.format` variant where table arguments are pretty-printed and
-- nil arguments are rendered as the string "nil".
function M.sprintf(fmt, ...)
    local args={...}
    for i, v in pairs(args) do
        local t=type(v)
        if t=='table' then args[i]=M.tostring(v)
        elseif t=='nil' then args[i]='nil' end
    end
    -- NOTE(review): global `unpack` is Lua 5.1; on 5.2+ this relies on a
    -- compatibility alias being in scope.
    return string.format(fmt, unpack(args))
end

-- `M.sprintf` followed by `print`.
function M.printf(...) print(M.sprintf(...)) end

return M

View File

@ -0,0 +1,90 @@
-----------------------------------------------------------------------------
-- MIME support for the Lua language.
-- Author: Diego Nehab
-- Conforming to RFCs 2045-2049
-----------------------------------------------------------------------------
-----------------------------------------------------------------------------
-- Declare module and import dependencies
-----------------------------------------------------------------------------
local base = _G
local ltn12 = require("ltn12")
local mime = require("mime.core")
local io = require("io")
local string = require("string")
local _M = mime
-- encode, decode and wrap algorithm tables, exported so callers can
-- register additional named filters.
local encodet, decodet, wrapt = {},{},{}
_M.encodet = encodet
_M.decodet = decodet
_M.wrapt = wrapt
-- creates a function that chooses a filter by name from a given table
-- Build a dispatcher over `table`: the returned function looks up a
-- filter factory by name (shifting arguments and using "default" when
-- the first argument is not a string) and invokes it with the options.
local function choose(table)
    return function(name, opt1, opt2)
        if base.type(name) ~= "string" then
            -- Name omitted: treat the first argument as opt1.
            name, opt1, opt2 = "default", name, opt1
        end
        local factory = table[name or "nil"]
        if factory then
            return factory(opt1, opt2)
        end
        base.error("unknown key (" .. base.tostring(name) .. ")", 3)
    end
end
-- define the encoding filters
encodet['base64'] = function()
    return ltn12.filter.cycle(_M.b64, "")
end

-- In "binary" mode CRLF is encoded as =0D=0A instead of being kept as a
-- literal line break.
encodet['quoted-printable'] = function(mode)
    return ltn12.filter.cycle(_M.qp, "",
        (mode == "binary") and "=0D=0A" or "\r\n")
end

-- define the decoding filters
decodet['base64'] = function()
    return ltn12.filter.cycle(_M.unb64, "")
end

decodet['quoted-printable'] = function()
    return ltn12.filter.cycle(_M.unqp, "")
end

-- Describe a chunk by its length ("''" for empty, "nil" at end of
-- stream). NOTE(review): not referenced anywhere in this file —
-- presumably a debugging leftover; confirm before removing.
local function format(chunk)
    if chunk then
        if chunk == "" then return "''"
        else return string.len(chunk) end
    else return "nil" end
end

-- define the line-wrap filters
wrapt['text'] = function(length)
    length = length or 76
    return ltn12.filter.cycle(_M.wrp, length, length)
end
wrapt['base64'] = wrapt['text']
wrapt['default'] = wrapt['text']

-- Quoted-printable uses its own wrapper that understands soft breaks.
wrapt['quoted-printable'] = function()
    return ltn12.filter.cycle(_M.qpwrp, 76, 76)
end

-- function that choose the encoding, decoding or wrap algorithm
_M.encode = choose(encodet)
_M.decode = choose(decodet)
_M.wrap = choose(wrapt)

-- define the end-of-line normalization filter
function _M.normalize(marker)
    return ltn12.filter.cycle(_M.eol, 0, marker)
end

-- high level stuffing filter
function _M.stuff()
    return ltn12.filter.cycle(_M.dot, 2)
end

return _M

View File

@ -0,0 +1,239 @@
-- SHA-256 code in Lua 5.2; based on the pseudo-code from
-- Wikipedia (http://en.wikipedia.org/wiki/SHA-2)
-- from http://lua-users.org/wiki/SecureHashAlgorithm
-- NOTE(review): requires the Lua 5.2 `bit32` library; 5.1 needs a
-- bit32 backport and 5.3+ a compatibility shim.
local band, rrotate, bxor, rshift, bnot =
    bit32.band, bit32.rrotate, bit32.bxor, bit32.rshift, bit32.bnot
local string, setmetatable, assert = string, setmetatable, assert

-- Initialize table of round constants
-- (first 32 bits of the fractional parts of the cube roots of the first
-- 64 primes 2..311):
local k = {
    0x428a2f98, 0x71374491, 0xb5c0fbcf, 0xe9b5dba5,
    0x3956c25b, 0x59f111f1, 0x923f82a4, 0xab1c5ed5,
    0xd807aa98, 0x12835b01, 0x243185be, 0x550c7dc3,
    0x72be5d74, 0x80deb1fe, 0x9bdc06a7, 0xc19bf174,
    0xe49b69c1, 0xefbe4786, 0x0fc19dc6, 0x240ca1cc,
    0x2de92c6f, 0x4a7484aa, 0x5cb0a9dc, 0x76f988da,
    0x983e5152, 0xa831c66d, 0xb00327c8, 0xbf597fc7,
    0xc6e00bf3, 0xd5a79147, 0x06ca6351, 0x14292967,
    0x27b70a85, 0x2e1b2138, 0x4d2c6dfc, 0x53380d13,
    0x650a7354, 0x766a0abb, 0x81c2c92e, 0x92722c85,
    0xa2bfe8a1, 0xa81a664b, 0xc24b8b70, 0xc76c51a3,
    0xd192e819, 0xd6990624, 0xf40e3585, 0x106aa070,
    0x19a4c116, 0x1e376c08, 0x2748774c, 0x34b0bcb5,
    0x391c0cb3, 0x4ed8aa4a, 0x5b9cca4f, 0x682e6ff3,
    0x748f82ee, 0x78a5636f, 0x84c87814, 0x8cc70208,
    0x90befffa, 0xa4506ceb, 0xbef9a3f7, 0xc67178f2,
}
-- transform a string of bytes in a string of hexadecimal digits
-- Hex-encode every byte of `s` (lowercase, two digits per byte).
local function str2hexa (s)
    return (s:gsub(".", function(c)
        return ("%02x"):format(c:byte())
    end))
end
-- transform number 'l' in a big-endian sequence of 'n' bytes
-- (coded as a string)
-- Encode number `l` as `n` bytes, big-endian, returned as a string.
local function num2s (l, n)
    local bytes = {}
    for i = n, 1, -1 do
        local rem = l % 256
        bytes[i] = string.char(rem)
        l = (l - rem) / 256
    end
    return table.concat(bytes)
end
-- transform the big-endian sequence of four bytes starting at
-- index 'i' in 's' into a number
-- Decode the four bytes of `s` starting at index `i` as a big-endian
-- 32-bit number.
local function s232num (s, i)
    local b1, b2, b3, b4 = string.byte(s, i, i + 3)
    return ((b1 * 256 + b2) * 256 + b3) * 256 + b4
end
-- append the bit '1' to the message
-- append k bits '0', where k is the minimum number >= 0 such that the
-- resulting message length (in bits) is congruent to 448 (mod 512)
-- append length of message (before pre-processing), in bits, as 64-bit
-- big-endian integer
-- SHA-2 padding: append the '1' bit (0x80 byte), zero padding, and the
-- original length in bits as a 64-bit big-endian integer, so that the
-- result's length is a multiple of 64 bytes.
local function preproc (msg, len)
    local zeros = 64 - ((len + 1 + 8) % 64)
    local encoded_len = num2s(8 * len, 8)
    local padded = msg .. "\128" .. string.rep("\0", zeros) .. encoded_len
    assert(#padded % 64 == 0)
    return padded
end
-- Fill `H` with the SHA-224 initial hash values and return it
-- (second 32 bits of the fractional parts of the square roots of the
-- 9th through 16th primes 23..53).
local function initH224 (H)
    local init = {
        0xc1059ed8, 0x367cd507, 0x3070dd17, 0xf70e5939,
        0xffc00b31, 0x68581511, 0x64f98fa7, 0xbefa4fa4,
    }
    for i = 1, 8 do H[i] = init[i] end
    return H
end
-- Fill `H` with the SHA-256 initial hash values and return it
-- (first 32 bits of the fractional parts of the square roots of the
-- first 8 primes 2..19).
local function initH256 (H)
    local init = {
        0x6a09e667, 0xbb67ae85, 0x3c6ef372, 0xa54ff53a,
        0x510e527f, 0x9b05688c, 0x1f83d9ab, 0x5be0cd19,
    }
    for i = 1, 8 do H[i] = init[i] end
    return H
end
-- Mix the 64-byte chunk of `msg` starting at index `i` into the hash
-- state `H[1..8]` (one SHA-256 compression; also used for SHA-224).
local function digestblock (msg, i, H)
    -- break chunk into sixteen 32-bit big-endian words w[1..16]
    local w = {}
    for j = 1, 16 do
        w[j] = s232num(msg, i + (j - 1)*4)
    end
    -- Extend the sixteen 32-bit words into sixty-four 32-bit words:
    for j = 17, 64 do
        local v = w[j - 15]
        local s0 = bxor(rrotate(v, 7), rrotate(v, 18), rshift(v, 3))
        v = w[j - 2]
        local s1 = bxor(rrotate(v, 17), rrotate(v, 19), rshift(v, 10))
        w[j] = w[j - 16] + s0 + w[j - 7] + s1
    end
    -- Initialize hash value for this chunk:
    local a, b, c, d, e, f, g, h =
        H[1], H[2], H[3], H[4], H[5], H[6], H[7], H[8]
    -- Main loop:
    for i = 1, 64 do
        local s0 = bxor(rrotate(a, 2), rrotate(a, 13), rrotate(a, 22))
        local maj = bxor(band(a, b), band(a, c), band(b, c))
        local t2 = s0 + maj
        local s1 = bxor(rrotate(e, 6), rrotate(e, 11), rrotate(e, 25))
        local ch = bxor (band(e, f), band(bnot(e), g))
        local t1 = h + s1 + ch + k[i] + w[i]
        h = g
        g = f
        f = e
        e = d + t1
        d = c
        c = b
        b = a
        a = t1 + t2
    end
    -- Add (mod 2^32) this chunk's hash to result so far:
    -- (single-argument bit32.band truncates the sum back to 32 bits)
    H[1] = band(H[1] + a)
    H[2] = band(H[2] + b)
    H[3] = band(H[3] + c)
    H[4] = band(H[4] + d)
    H[5] = band(H[5] + e)
    H[6] = band(H[6] + f)
    H[7] = band(H[7] + g)
    H[8] = band(H[8] + h)
end

-- Produce the final SHA-224 hex digest (big-endian): the first seven
-- state words; H[8] is dropped per the SHA-224 specification.
local function finalresult224 (H)
    -- Produce the final hash value (big-endian):
    return
    str2hexa(num2s(H[1], 4)..num2s(H[2], 4)..num2s(H[3], 4)..num2s(H[4], 4)..
             num2s(H[5], 4)..num2s(H[6], 4)..num2s(H[7], 4))
end

-- Produce the final SHA-256 hex digest (big-endian): all eight words.
local function finalresult256 (H)
    -- Produce the final hash value (big-endian):
    return
    str2hexa(num2s(H[1], 4)..num2s(H[2], 4)..num2s(H[3], 4)..num2s(H[4], 4)..
             num2s(H[5], 4)..num2s(H[6], 4)..num2s(H[7], 4)..num2s(H[8], 4))
end
----------------------------------------------------------------------
local HH = {} -- shared scratch state table, reused by one-shot hashes

-- One-shot SHA-224 of `msg`; returns a 56-character lowercase hex string.
local function hash224 (msg)
    msg = preproc(msg, #msg)
    local H = initH224(HH)
    -- Process the message in successive 512-bit (64 bytes) chunks:
    for i = 1, #msg, 64 do
        digestblock(msg, i, H)
    end
    return finalresult224(H)
end

-- One-shot SHA-256 of `msg`; returns a 64-character lowercase hex string.
local function hash256 (msg)
    msg = preproc(msg, #msg)
    local H = initH256(HH)
    -- Process the message in successive 512-bit (64 bytes) chunks:
    for i = 1, #msg, 64 do
        digestblock(msg, i, H)
    end
    return finalresult256(H)
end
----------------------------------------------------------------------
-- Streaming SHA-256 interface: `new256()` returns an object that is fed
-- with `o:add(chunk)` and finished with `o:close()`.
local mt = {}

local function new256 ()
    local o = {H = initH256({}), msg = "", len = 0}
    setmetatable(o, mt)
    return o
end

mt.__index = mt

-- Buffer `m`, digest every complete 64-byte block, keep the remainder
-- for the next call.
function mt:add (m)
    self.msg = self.msg .. m
    self.len = self.len + #m
    local t = 0
    while #self.msg - t >= 64 do
        digestblock(self.msg, t + 1, self.H)
        t = t + 64
    end
    self.msg = self.msg:sub(t + 1, -1)
end

-- Pad the pending remainder with the total length, digest it, and
-- return the final hex digest.
function mt:close ()
    self.msg = preproc(self.msg, self.len)
    self:add("")
    return finalresult256(self.H)
end
----------------------------------------------------------------------
-- Public module interface.
return {
    hash224 = hash224,
    hash256 = hash256,
    new256 = new256,
}

View File

@ -0,0 +1,165 @@
-----------------------------------------------------------------------------
-- LuaSocket helper module
-- Author: Diego Nehab
-----------------------------------------------------------------------------
-----------------------------------------------------------------------------
-- Declare module and import dependencies
-----------------------------------------------------------------------------
local base = _G
local string = require("string")
local math = require("math")
local socket = require("socket.core")
local _M = socket
-- this is needed in case this library is used when "socket.core" is loaded,
-- but has an older version of luasocket that does not include `connect`.
if not socket.connect then
    -- Compatibility shim for older socket cores lacking `connect`:
    -- create a TCP socket, optionally bind it to laddress:lport, then
    -- connect it to address:port. Returns the socket, or nil + error.
    socket.connect = function (address, port, laddress, lport)
        local sock, err = socket.tcp()
        if not sock then return nil, err end
        if laddress then
            local res, berr = sock:bind(laddress, lport, -1)
            if not res then
                sock:close() -- BUGFIX: don't leak the descriptor on failure
                return nil, berr
            end
        end
        local res, cerr = sock:connect(address, port)
        if not res then
            sock:close() -- BUGFIX: don't leak the descriptor on failure
            return nil, cerr
        end
        return sock
    end
end
-----------------------------------------------------------------------------
-- Exported auxiliar functions
-----------------------------------------------------------------------------
-- IPv4-only / IPv6-only connect variants.
function _M.connect4(address, port, laddress, lport)
    return socket.connect(address, port, laddress, lport, "inet")
end

function _M.connect6(address, port, laddress, lport)
    return socket.connect(address, port, laddress, lport, "inet6")
end

-- Resolve `host`, then bind and listen on the first address that works;
-- "*" binds to all IPv4 interfaces. Returns the server socket, or
-- nil plus an error message.
function _M.bind(host, port, backlog)
    if host == "*" then host = "0.0.0.0" end
    local addrinfo, err = socket.dns.getaddrinfo(host);
    if not addrinfo then return nil, err end
    local sock, res
    err = "no info on address"
    for i, alt in base.ipairs(addrinfo) do
        -- Pick the socket constructor matching the address family.
        if alt.family == "inet" then
            sock, err = socket.tcp()
        else
            sock, err = socket.tcp6()
        end
        if not sock then return nil, err end
        sock:setoption("reuseaddr", true)
        res, err = sock:bind(alt.addr, port)
        if not res then
            sock:close()
        else
            res, err = sock:listen(backlog)
            if not res then
                sock:close()
            else
                return sock
            end
        end
    end
    return nil, err
end
-- Default `try`: raises on failure, with no finalizer attached.
_M.try = _M.newtry()

-- Build a dispatcher over `table`: pick an entry by name ("default"
-- when the first argument is not a string) and call it with the
-- remaining options. Same helper pattern as in mime.lua.
function _M.choose(table)
    return function(name, opt1, opt2)
        if base.type(name) ~= "string" then
            name, opt1, opt2 = "default", name, opt1
        end
        local f = table[name or "nil"]
        if not f then base.error("unknown key (".. base.tostring(name) ..")", 3)
        else return f(opt1, opt2) end
    end
end
-----------------------------------------------------------------------------
-- Socket sources and sinks, conforming to LTN12
-----------------------------------------------------------------------------
-- create namespaces inside LuaSocket namespace
local sourcet, sinkt = {}, {}
_M.sourcet = sourcet
_M.sinkt = sinkt

-- Default chunk size, in bytes, used by the socket sources.
_M.BLOCKSIZE = 2048
-- LTN12 sink that forwards chunks to `sock` and closes it when the
-- stream ends (chunk == nil).
sinkt["close-when-done"] = function(sock)
    return base.setmetatable({
        getfd = function() return sock:getfd() end,
        dirty = function() return sock:dirty() end
    }, {
        __call = function(self, chunk, err)
            if not chunk then
                sock:close()
                return 1
            else return sock:send(chunk) end
        end
    })
end

-- LTN12 sink that forwards chunks to `sock` but leaves it open at end
-- of stream.
sinkt["keep-open"] = function(sock)
    return base.setmetatable({
        getfd = function() return sock:getfd() end,
        dirty = function() return sock:dirty() end
    }, {
        __call = function(self, chunk, err)
            if chunk then return sock:send(chunk)
            else return 1 end
        end
    })
end

sinkt["default"] = sinkt["keep-open"]

_M.sink = _M.choose(sinkt)
-- LTN12 source that reads at most `length` bytes from `sock`, in
-- blocks of up to BLOCKSIZE, then signals end of stream.
sourcet["by-length"] = function(sock, length)
    return base.setmetatable({
        getfd = function() return sock:getfd() end,
        dirty = function() return sock:dirty() end
    }, {
        __call = function()
            if length <= 0 then return nil end
            local size = math.min(socket.BLOCKSIZE, length)
            local chunk, err = sock:receive(size)
            if err then return nil, err end
            length = length - string.len(chunk)
            return chunk
        end
    })
end

-- LTN12 source that reads from `sock` until the peer closes the
-- connection; the socket is then closed and the final partial chunk
-- is returned.
sourcet["until-closed"] = function(sock)
    local done
    return base.setmetatable({
        getfd = function() return sock:getfd() end,
        dirty = function() return sock:dirty() end
    }, {
        __call = function()
            if done then return nil end
            local chunk, err, partial = sock:receive(socket.BLOCKSIZE)
            if not err then return chunk
            elseif err == "closed" then
                sock:close()
                done = 1
                return partial
            else return nil, err end
        end
    })
end

sourcet["default"] = sourcet["until-closed"]

_M.source = _M.choose(sourcet)

return _M

View File

@ -0,0 +1,285 @@
-----------------------------------------------------------------------------
-- FTP support for the Lua language
-- LuaSocket toolkit.
-- Author: Diego Nehab
-----------------------------------------------------------------------------
-----------------------------------------------------------------------------
-- Declare module and import dependencies
-----------------------------------------------------------------------------
local base = _G
local table = require("table")
local string = require("string")
local math = require("math")
local socket = require("socket")
local url = require("socket.url")
local tp = require("socket.tp")
local ltn12 = require("ltn12")
socket.ftp = {}
local _M = socket.ftp

-----------------------------------------------------------------------------
-- Program constants
-----------------------------------------------------------------------------
-- timeout in seconds before the program gives up on a connection
_M.TIMEOUT = 60
-- default port for ftp service
_M.PORT = 21
-- this is the default anonymous password. used when no password is
-- provided in url. should be changed to your e-mail.
_M.USER = "ftp"
_M.PASSWORD = "anonymous@anonymous.org"

-----------------------------------------------------------------------------
-- Low level FTP API
-----------------------------------------------------------------------------
local metat = { __index = {} }

-- Open a control connection to server:port and wrap it in an FTP
-- object; `create` is an optional socket constructor forwarded to tp.
function _M.open(server, port, create)
    local tp = socket.try(tp.connect(server, port or _M.PORT, _M.TIMEOUT, create))
    local f = base.setmetatable({ tp = tp }, metat)
    -- make sure everything gets closed in an exception
    f.try = socket.newtry(function() f:close() end)
    return f
end
-- Accept the data connection previously announced with PORT.
function metat.__index:portconnect()
    self.try(self.server:settimeout(_M.TIMEOUT))
    self.data = self.try(self.server:accept())
    self.try(self.data:settimeout(_M.TIMEOUT))
end

-- Open the data connection to the address announced by PASV.
function metat.__index:pasvconnect()
    self.data = self.try(socket.tcp())
    self.try(self.data:settimeout(_M.TIMEOUT))
    self.try(self.data:connect(self.pasvt.ip, self.pasvt.port))
end

-- Authenticate; a 331 reply means a password is also required.
function metat.__index:login(user, password)
    self.try(self.tp:command("user", user or _M.USER))
    local code, reply = self.try(self.tp:check{"2..", 331})
    if code == 331 then
        self.try(self.tp:command("pass", password or _M.PASSWORD))
        self.try(self.tp:check("2.."))
    end
    return 1
end

-- Enter passive mode: parse "h1,h2,h3,h4,p1,p2" from the reply into
-- self.pasvt. Any active-mode server socket becomes useless and is
-- closed. Returns the announced ip and port.
function metat.__index:pasv()
    self.try(self.tp:command("pasv"))
    local code, reply = self.try(self.tp:check("2.."))
    local pattern = "(%d+)%D(%d+)%D(%d+)%D(%d+)%D(%d+)%D(%d+)"
    local a, b, c, d, p1, p2 = socket.skip(2, string.find(reply, pattern))
    self.try(a and b and c and d and p1 and p2, reply)
    self.pasvt = {
        ip = string.format("%d.%d.%d.%d", a, b, c, d),
        port = p1*256 + p2
    }
    if self.server then
        self.server:close()
        self.server = nil
    end
    return self.pasvt.ip, self.pasvt.port
end

-- Enter active mode: listen on an ephemeral local port (unless an
-- explicit ip/port is given) and announce it with PORT.
function metat.__index:port(ip, port)
    self.pasvt = nil
    if not ip then
        ip, port = self.try(self.tp:getcontrol():getsockname())
        self.server = self.try(socket.bind(ip, 0))
        ip, port = self.try(self.server:getsockname())
        self.try(self.server:settimeout(_M.TIMEOUT))
    end
    -- NOTE(review): math.mod is Lua 5.0/5.1; 5.2+ needs math.fmod or %.
    local pl = math.mod(port, 256)
    local ph = (port - pl)/256
    local arg = string.gsub(string.format("%s,%d,%d", ip, ph, pl), "%.", ",")
    self.try(self.tp:command("port", arg))
    self.try(self.tp:check("2.."))
    return 1
end
-- Upload: pump sendt.source through the data connection using the
-- transfer command in sendt.command (default "stor"). Requires a prior
-- :port() or :pasv(). Returns the number of bytes sent.
function metat.__index:send(sendt)
    self.try(self.pasvt or self.server, "need port or pasv first")
    -- if there is a pasvt table, we already sent a PASV command
    -- we just get the data connection into self.data
    if self.pasvt then self:pasvconnect() end
    -- get the transfer argument and command
    local argument = sendt.argument or
        url.unescape(string.gsub(sendt.path or "", "^[/\\]", ""))
    if argument == "" then argument = nil end
    local command = sendt.command or "stor"
    -- send the transfer command and check the reply
    self.try(self.tp:command(command, argument))
    local code, reply = self.try(self.tp:check{"2..", "1.."})
    -- if there is not a a pasvt table, then there is a server
    -- and we already sent a PORT command
    if not self.pasvt then self:portconnect() end
    -- get the sink, source and step for the transfer
    local step = sendt.step or ltn12.pump.step
    local readt = {self.tp.c}
    local checkstep = function(src, snk)
        -- check status in control connection while downloading
        local readyt = socket.select(readt, nil, 0)
        -- NOTE(review): `readyt[tp]` indexes by the tp *module*, which
        -- does not look like it can match the control socket placed in
        -- `readt`; later upstream versions index by the socket itself —
        -- confirm against the LuaSocket repository before changing.
        if readyt[tp] then code = self.try(self.tp:check("2..")) end
        return step(src, snk)
    end
    local sink = socket.sink("close-when-done", self.data)
    -- transfer all data and check error
    self.try(ltn12.pump.all(sendt.source, sink, checkstep))
    if string.find(code, "1..") then self.try(self.tp:check("2..")) end
    -- done with data connection
    self.data:close()
    -- find out how many bytes were sent
    local sent = socket.skip(1, self.data:getstats())
    self.data = nil
    return sent
end
-- Download: pump the data connection into recvt.sink using the
-- transfer command in recvt.command (default "retr"). A 2xx reply to
-- the transfer command means the payload came inline in the reply.
function metat.__index:receive(recvt)
    self.try(self.pasvt or self.server, "need port or pasv first")
    if self.pasvt then self:pasvconnect() end
    local argument = recvt.argument or
        url.unescape(string.gsub(recvt.path or "", "^[/\\]", ""))
    if argument == "" then argument = nil end
    local command = recvt.command or "retr"
    self.try(self.tp:command(command, argument))
    local code,reply = self.try(self.tp:check{"1..", "2.."})
    if (code >= 200) and (code <= 299) then
        recvt.sink(reply)
        return 1
    end
    if not self.pasvt then self:portconnect() end
    local source = socket.source("until-closed", self.data)
    local step = recvt.step or ltn12.pump.step
    self.try(ltn12.pump.all(source, recvt.sink, step))
    if string.find(code, "1..") then self.try(self.tp:check("2..")) end
    self.data:close()
    self.data = nil
    return 1
end

-- Change the remote working directory.
function metat.__index:cwd(dir)
    self.try(self.tp:command("cwd", dir))
    self.try(self.tp:check(250))
    return 1
end

-- Set the transfer type ("a" ascii or "i" image/binary).
function metat.__index:type(type)
    self.try(self.tp:command("type", type))
    self.try(self.tp:check(200))
    return 1
end

-- Consume the server greeting (and its continuation, if any).
function metat.__index:greet()
    local code = self.try(self.tp:check{"1..", "2.."})
    if string.find(code, "1..") then self.try(self.tp:check("2..")) end
    return 1
end

-- Send QUIT and expect a 2xx reply.
function metat.__index:quit()
    self.try(self.tp:command("quit"))
    self.try(self.tp:check("2.."))
    return 1
end

-- Tear down data, server and control connections.
function metat.__index:close()
    if self.data then self.data:close() end
    if self.server then self.server:close() end
    return self.tp:close()
end
-----------------------------------------------------------------------------
-- High level FTP API
-----------------------------------------------------------------------------
-- Merge table `t` on top of the fields parsed from `t.url` (explicit
-- fields win over URL components). Without a url field, `t` is
-- returned untouched.
local function override(t)
    if not t.url then return t end
    local merged = url.parse(t.url)
    for key, value in base.pairs(t) do
        merged[key] = value
    end
    return merged
end
-- Table-driven upload: open, login, optionally set the transfer type,
-- go passive, send, quit and close. Returns the number of bytes sent.
local function tput(putt)
    putt = override(putt)
    socket.try(putt.host, "missing hostname")
    local f = _M.open(putt.host, putt.port, putt.create)
    f:greet()
    f:login(putt.user, putt.password)
    if putt.type then f:type(putt.type) end
    f:pasv()
    local sent = f:send(putt)
    f:quit()
    f:close()
    return sent
end

-- URL-parsing defaults for the simple string API.
local default = {
    path = "/",
    scheme = "ftp"
}
-- Parse and validate an ftp:// URL; extracts the optional ";type=a|i"
-- parameter into t.type.
local function parse(u)
    local t = socket.try(url.parse(u, default))
    socket.try(t.scheme == "ftp", "wrong scheme '" .. t.scheme .. "'")
    socket.try(t.host, "missing hostname")
    local pat = "^type=(.)$"
    if t.params then
        t.type = socket.skip(2, string.find(t.params, pat))
        socket.try(t.type == "a" or t.type == "i",
            "invalid type '" .. t.type .. "'")
    end
    return t
end
-- String-URL upload: the request is an ftp:// URL plus a body string.
local function sput(u, body)
    local putt = parse(u)
    putt.source = ltn12.source.string(body)
    return tput(putt)
end

-- Public upload entry point: accepts either a URL string + body, or a
-- request table.
_M.put = socket.protect(function(putt, body)
    if base.type(putt) == "string" then return sput(putt, body)
    else return tput(putt) end
end)
-- Table-driven download into gett.sink.
local function tget(gett)
    gett = override(gett)
    socket.try(gett.host, "missing hostname")
    local f = _M.open(gett.host, gett.port, gett.create)
    f:greet()
    f:login(gett.user, gett.password)
    if gett.type then f:type(gett.type) end
    f:pasv()
    f:receive(gett)
    f:quit()
    return f:close()
end

-- String-URL download: returns the retrieved body as a string.
local function sget(u)
    local gett = parse(u)
    local t = {}
    gett.sink = ltn12.sink.table(t)
    tget(gett)
    return table.concat(t)
end
-- Run an arbitrary FTP command (cmdt.command with cmdt.argument) on
-- cmdt.host, optionally checking the reply code against cmdt.check.
_M.command = socket.protect(function(cmdt)
    cmdt = override(cmdt)
    socket.try(cmdt.host, "missing hostname")
    socket.try(cmdt.command, "missing command")
    -- BUGFIX: this used to call the global `open`, which is undefined
    -- here; the module-local constructor is `_M.open`.
    local f = _M.open(cmdt.host, cmdt.port, cmdt.create)
    f:greet()
    f:login(cmdt.user, cmdt.password)
    f.try(f.tp:command(cmdt.command, cmdt.argument))
    if cmdt.check then f.try(f.tp:check(cmdt.check)) end
    f:quit()
    return f:close()
end)
-- Public download entry point: accepts a URL string or a request table.
_M.get = socket.protect(function(gett)
    if base.type(gett) == "string" then return sget(gett)
    else return tget(gett) end
end)

return _M

View File

@ -0,0 +1,104 @@
-----------------------------------------------------------------------------
-- Canonic header field capitalization
-- LuaSocket toolkit.
-- Author: Diego Nehab
-----------------------------------------------------------------------------
local socket = require("socket")
socket.headers = {}
local _M = socket.headers
-- Map from lower-cased header field names to their canonic
-- capitalization, covering HTTP, mail and delivery-status fields.
_M.canonic = {
    ["accept"] = "Accept",
    ["accept-charset"] = "Accept-Charset",
    ["accept-encoding"] = "Accept-Encoding",
    ["accept-language"] = "Accept-Language",
    ["accept-ranges"] = "Accept-Ranges",
    ["action"] = "Action",
    ["alternate-recipient"] = "Alternate-Recipient",
    ["age"] = "Age",
    ["allow"] = "Allow",
    ["arrival-date"] = "Arrival-Date",
    ["authorization"] = "Authorization",
    ["bcc"] = "Bcc",
    ["cache-control"] = "Cache-Control",
    ["cc"] = "Cc",
    ["comments"] = "Comments",
    ["connection"] = "Connection",
    ["content-description"] = "Content-Description",
    ["content-disposition"] = "Content-Disposition",
    ["content-encoding"] = "Content-Encoding",
    ["content-id"] = "Content-ID",
    ["content-language"] = "Content-Language",
    ["content-length"] = "Content-Length",
    ["content-location"] = "Content-Location",
    ["content-md5"] = "Content-MD5",
    ["content-range"] = "Content-Range",
    ["content-transfer-encoding"] = "Content-Transfer-Encoding",
    ["content-type"] = "Content-Type",
    ["cookie"] = "Cookie",
    ["date"] = "Date",
    ["diagnostic-code"] = "Diagnostic-Code",
    ["dsn-gateway"] = "DSN-Gateway",
    ["etag"] = "ETag",
    ["expect"] = "Expect",
    ["expires"] = "Expires",
    ["final-log-id"] = "Final-Log-ID",
    ["final-recipient"] = "Final-Recipient",
    ["from"] = "From",
    ["host"] = "Host",
    ["if-match"] = "If-Match",
    ["if-modified-since"] = "If-Modified-Since",
    ["if-none-match"] = "If-None-Match",
    ["if-range"] = "If-Range",
    ["if-unmodified-since"] = "If-Unmodified-Since",
    ["in-reply-to"] = "In-Reply-To",
    ["keywords"] = "Keywords",
    ["last-attempt-date"] = "Last-Attempt-Date",
    ["last-modified"] = "Last-Modified",
    ["location"] = "Location",
    ["max-forwards"] = "Max-Forwards",
    ["message-id"] = "Message-ID",
    ["mime-version"] = "MIME-Version",
    ["original-envelope-id"] = "Original-Envelope-ID",
    ["original-recipient"] = "Original-Recipient",
    ["pragma"] = "Pragma",
    ["proxy-authenticate"] = "Proxy-Authenticate",
    ["proxy-authorization"] = "Proxy-Authorization",
    ["range"] = "Range",
    ["received"] = "Received",
    ["received-from-mta"] = "Received-From-MTA",
    ["references"] = "References",
    ["referer"] = "Referer",
    ["remote-mta"] = "Remote-MTA",
    ["reply-to"] = "Reply-To",
    ["reporting-mta"] = "Reporting-MTA",
    ["resent-bcc"] = "Resent-Bcc",
    ["resent-cc"] = "Resent-Cc",
    ["resent-date"] = "Resent-Date",
    ["resent-from"] = "Resent-From",
    ["resent-message-id"] = "Resent-Message-ID",
    ["resent-reply-to"] = "Resent-Reply-To",
    ["resent-sender"] = "Resent-Sender",
    ["resent-to"] = "Resent-To",
    ["retry-after"] = "Retry-After",
    ["return-path"] = "Return-Path",
    ["sender"] = "Sender",
    ["server"] = "Server",
    ["smtp-remote-recipient"] = "SMTP-Remote-Recipient",
    ["status"] = "Status",
    ["subject"] = "Subject",
    ["te"] = "TE",
    ["to"] = "To",
    ["trailer"] = "Trailer",
    ["transfer-encoding"] = "Transfer-Encoding",
    ["upgrade"] = "Upgrade",
    ["user-agent"] = "User-Agent",
    ["vary"] = "Vary",
    ["via"] = "Via",
    ["warning"] = "Warning",
    ["will-retry-until"] = "Will-Retry-Until",
    ["www-authenticate"] = "WWW-Authenticate",
    ["x-mailer"] = "X-Mailer",
}
return _M

View File

@ -0,0 +1,354 @@
-----------------------------------------------------------------------------
-- HTTP/1.1 client support for the Lua language.
-- LuaSocket toolkit.
-- Author: Diego Nehab
-----------------------------------------------------------------------------
-----------------------------------------------------------------------------
-- Declare module and import dependencies
-------------------------------------------------------------------------------
local socket = require("socket")
local url = require("socket.url")
local ltn12 = require("ltn12")
local mime = require("mime")
local string = require("string")
local headers = require("socket.headers")
local base = _G
local table = require("table")
socket.http = {}
local _M = socket.http
-----------------------------------------------------------------------------
-- Program constants
-----------------------------------------------------------------------------
-- connection timeout in seconds
-- BUG FIX: this was the bare assignment "TIMEOUT = 60", which leaked a
-- global and left _M.TIMEOUT nil, so _M.open below called
-- settimeout(nil); it must live on the module table.
_M.TIMEOUT = 60
-- default port for document retrieval
_M.PORT = 80
-- user agent field sent in request
_M.USERAGENT = socket._VERSION
-----------------------------------------------------------------------------
-- Reads MIME headers from a connection, unfolding where needed
-----------------------------------------------------------------------------
-- Reads MIME headers from `sock` into a table keyed by lower-cased
-- field name, unfolding continuation lines and joining repeated fields
-- with ", ". Returns the table, or nil plus an error message.
local function receiveheaders(sock, headers)
    local line, name, value, err
    headers = headers or {}
    -- get first line
    line, err = sock:receive()
    if err then return nil, err end
    -- headers go until a blank line is found
    while line ~= "" do
        -- get field-name and value (string.match yields the same two
        -- captures socket.skip(2, string.find(...)) did)
        name, value = string.match(line, "^(.-):%s*(.*)")
        if not (name and value) then return nil, "malformed response headers" end
        name = string.lower(name)
        -- get next line (value might be folded)
        line, err = sock:receive()
        if err then return nil, err end
        -- unfold any folded values
        while string.find(line, "^%s") do
            value = value .. line
            -- BUG FIX: the error from this receive was discarded (only
            -- `line` was assigned), so a failure mid-fold left `line`
            -- nil and string.find crashed; capture and check `err`.
            line, err = sock:receive()
            if err then return nil, err end
        end
        -- save pair in table
        if headers[name] then headers[name] = headers[name] .. ", " .. value
        else headers[name] = value end
    end
    return headers
end
-----------------------------------------------------------------------------
-- Extra sources and sinks
-----------------------------------------------------------------------------
-- LTN12 source that decodes an HTTP "chunked" transfer-encoded body
-- read from `sock`; any trailers are stored into the optional
-- `headers` table. Returns nil at end of body (or nil, err on failure).
socket.sourcet["http-chunked"] = function(sock, headers)
    return base.setmetatable({
        getfd = function() return sock:getfd() end,
        dirty = function() return sock:dirty() end
    }, {
        __call = function()
            -- get chunk size (hexadecimal), skip any ";ext" extension
            local line, err = sock:receive()
            if err then return nil, err end
            local size = base.tonumber(string.gsub(line, ";.*", ""), 16)
            if not size then return nil, "invalid chunk size" end
            -- was it the last chunk?
            if size > 0 then
                -- if not, get chunk and skip terminating CRLF
                local chunk, err, part = sock:receive(size)
                if chunk then sock:receive() end
                return chunk, err
            else
                -- if it was, read trailers into headers table;
                -- falling through returns nil, ending the source
                headers, err = receiveheaders(sock, headers)
                if not headers then return nil, err end
            end
        end
    })
end
-- LTN12 sink that writes data to `sock` using HTTP "chunked"
-- transfer-encoding; a nil chunk emits the terminating zero chunk.
socket.sinkt["http-chunked"] = function(sock)
    return base.setmetatable({
        getfd = function() return sock:getfd() end,
        dirty = function() return sock:dirty() end
    }, {
        __call = function(self, chunk, err)
            -- end of stream: send the last-chunk marker
            if not chunk then return sock:send("0\r\n\r\n") end
            -- hex size line, then payload, then CRLF
            local header = string.format("%X\r\n", string.len(chunk))
            return sock:send(header .. chunk .. "\r\n")
        end
    })
end
-----------------------------------------------------------------------------
-- Low level HTTP API
-----------------------------------------------------------------------------
-- Shared metatable for low-level connection objects.
local metat = { __index = {} }

-- Connect to host:port (default _M.PORT) and return a connection
-- object; `create` optionally overrides the socket constructor.
function _M.open(host, port, create)
    local sock = socket.try((create or socket.tcp)())
    local h = base.setmetatable({ c = sock }, metat)
    -- exceptions raised through h.try close the socket first
    h.try = socket.newtry(function() h:close() end)
    -- set the timeout before connecting so connect itself can time out
    h.try(sock:settimeout(_M.TIMEOUT))
    h.try(sock:connect(host, port or _M.PORT))
    return h
end
-- Send the HTTP/1.1 request line for `method` (default "GET") and uri.
function metat.__index:sendrequestline(method, uri)
    local line = string.format("%s %s HTTP/1.1\r\n", method or "GET", uri)
    return self.try(self.c:send(line))
end
-- Send the request headers, canonicalizing known field names; the
-- blank line ending the header section is sent in the same buffer.
function metat.__index:sendheaders(tosend)
    local canonic = headers.canonic
    local buffer = "\r\n"
    for name, value in base.pairs(tosend) do
        buffer = (canonic[name] or name) .. ": " .. value .. "\r\n" .. buffer
    end
    self.try(self.c:send(buffer))
    return 1
end
-- Pump the request body from `source` into the socket: identity
-- framing when a content-length is declared, chunked otherwise.
function metat.__index:sendbody(headers, source, step)
    source = source or ltn12.source.empty()
    step = step or ltn12.pump.step
    local mode
    if headers["content-length"] then mode = "keep-open"
    else mode = "http-chunked" end
    return self.try(ltn12.pump.all(source, socket.sink(mode, self.c), step))
end
-- Receive and parse the status line; returns the numeric code and the
-- raw status line. For HTTP/0.9 servers (no status line) returns nil
-- plus the bytes already read, which receive09body replays.
function metat.__index:receivestatusline()
    -- read just 5 bytes to sniff for the "HTTP/" prefix
    local status = self.try(self.c:receive(5))
    -- identify HTTP/0.9 responses, which do not contain a status line
    -- this is just a heuristic, but is what the RFC recommends
    if status ~= "HTTP/" then return nil, status end
    -- otherwise proceed reading a status line
    status = self.try(self.c:receive("*l", status))
    local code = socket.skip(2, string.find(status, "HTTP/%d*%.%d* (%d%d%d)"))
    return self.try(base.tonumber(code), status)
end
-- Receive the response headers into a fresh table, raising through
-- self.try on failure.
function metat.__index:receiveheaders()
    return self.try(receiveheaders(self.c))
end
-- Pump the response body into `sink`, choosing the framing from the
-- response headers: chunked transfer-encoding, explicit
-- content-length, or read-until-close as the fallback.
function metat.__index:receivebody(headers, sink, step)
    sink = sink or ltn12.sink.null()
    step = step or ltn12.pump.step
    local length = base.tonumber(headers["content-length"])
    local t = headers["transfer-encoding"] -- shortcut
    local mode = "default" -- connection close
    if t and t ~= "identity" then mode = "http-chunked"
    -- reuse `length` instead of re-parsing the header a second time
    elseif length then mode = "by-length" end
    return self.try(ltn12.pump.all(socket.source(mode, self.c, length),
        sink, step))
end
-- Read an HTTP/0.9 body: replay the bytes already consumed by the
-- status-line sniff (`status`), then read until the peer closes.
function metat.__index:receive09body(status, sink, step)
    local source = ltn12.source.rewind(socket.source("until-closed", self.c))
    source(status)
    return self.try(ltn12.pump.all(source, sink, step))
end
-- Close the underlying socket.
function metat.__index:close()
    return self.c:close()
end
-----------------------------------------------------------------------------
-- High level HTTP API
-----------------------------------------------------------------------------
-- Compute the request-URI: the full url when going through a proxy,
-- otherwise just the path (+params/query/fragment).
local function adjusturi(reqt)
    -- NOTE(review): reads the global PROXY, same as adjustproxy below.
    if reqt.proxy or PROXY then return url.build(reqt) end
    return url.build{
        path = socket.try(reqt.path, "invalid path 'nil'"),
        params = reqt.params,
        query = reqt.query,
        fragment = reqt.fragment
    }
end
-- Return the host/port to actually connect to: the proxy when one is
-- configured (per-request or global PROXY), else the target itself.
local function adjustproxy(reqt)
    local proxy = reqt.proxy or PROXY
    if not proxy then return reqt.host, reqt.port end
    local parsed = url.parse(proxy)
    return parsed.host, parsed.port or 3128
end
-- Build the final lower-cased header table: defaults first, then basic
-- auth when credentials were supplied, then user headers on top.
local function adjustheaders(reqt)
    local lower = {
        ["user-agent"] = _M.USERAGENT,
        ["host"] = reqt.host,
        ["connection"] = "close, TE",
        ["te"] = "trailers"
    }
    if reqt.user and reqt.password then
        lower["authorization"] =
            "Basic " .. (mime.b64(reqt.user .. ":" .. reqt.password))
    end
    -- user-supplied headers override the defaults
    for name, value in base.pairs(reqt.headers or lower) do
        lower[string.lower(name)] = value
    end
    return lower
end
-- default url parts, applied by url.parse for missing components
local default = {
    host = "",
    port = _M.PORT,
    path ="/",
    scheme = "http"
}
-- Normalize a request table: parse reqt.url, let explicit fields
-- override the parsed parts, validate the host, compute the
-- request-URI, apply proxy routing and build the final header table.
local function adjustrequest(reqt)
    -- parse url if provided
    local nreqt = reqt.url and url.parse(reqt.url, default) or {}
    -- explicit components override url
    for i,v in base.pairs(reqt) do nreqt[i] = v end
    -- a url like "http://host:/p" parses to an empty port string
    if nreqt.port == "" then nreqt.port = 80 end
    socket.try(nreqt.host and nreqt.host ~= "",
        "invalid host '" .. base.tostring(nreqt.host) .. "'")
    -- compute uri if user hasn't overriden
    nreqt.uri = reqt.uri or adjusturi(nreqt)
    -- ajust host and port if there is a proxy
    nreqt.host, nreqt.port = adjustproxy(nreqt)
    -- adjust headers in request
    nreqt.headers = adjustheaders(nreqt)
    return nreqt
end
-- Decide whether a redirect should be followed: requires a non-blank
-- Location header, redirects not disabled, a redirect status code, a
-- GET/HEAD (or unspecified) method, and fewer than 5 hops so far.
local function shouldredirect(reqt, code, headers)
    local location = headers.location
    if not location then return false end
    if string.gsub(location, "%s", "") == "" then return false end
    if reqt.redirect == false then return false end
    if code ~= 301 and code ~= 302 and code ~= 303 and code ~= 307 then
        return false
    end
    if reqt.method and reqt.method ~= "GET" and reqt.method ~= "HEAD" then
        return false
    end
    return not reqt.nredirects or reqt.nredirects < 5
end
-- A response carries no body after a HEAD request or for 1xx, 204 and
-- 304 status codes; returns 1 (truthy) when a body is expected.
local function shouldreceivebody(reqt, code)
    local bodyless = reqt.method == "HEAD"
        or code == 204 or code == 304
        or (code >= 100 and code < 200)
    if bodyless then return nil end
    return 1
end
-- forward declarations (mutually recursive: redirects re-enter trequest)
local trequest, tredirect
-- Follow one redirect: re-issue the request against the resolved
-- Location, bumping the hop counter. Returns the final result plus a
-- headers table whose `location` records where we were sent.
--[[local]] function tredirect(reqt, location)
    local result, code, headers, status = trequest {
        -- the RFC says the redirect URL has to be absolute, but some
        -- servers do not respect that
        url = url.absolute(reqt.url, location),
        source = reqt.source,
        sink = reqt.sink,
        headers = reqt.headers,
        proxy = reqt.proxy,
        nredirects = (reqt.nredirects or 0) + 1,
        create = reqt.create
    }
    -- pass location header back as a hint we redirected
    headers = headers or {}
    headers.location = headers.location or location
    return result, code, headers, status
end
-- Generic (table-form) request: open a connection, send request line,
-- headers and optional body, then read the reply, following redirects
-- where allowed. Returns 1, code, headers, status (or the redirect's
-- result).
--[[local]] function trequest(reqt)
    -- we loop until we get what we want, or
    -- until we are sure there is no way to get it
    local nreqt = adjustrequest(reqt)
    local h = _M.open(nreqt.host, nreqt.port, nreqt.create)
    -- send request line and headers
    h:sendrequestline(nreqt.method, nreqt.uri)
    h:sendheaders(nreqt.headers)
    -- if there is a body, send it
    if nreqt.source then
        h:sendbody(nreqt.headers, nreqt.source, nreqt.step)
    end
    local code, status = h:receivestatusline()
    -- if it is an HTTP/0.9 server, simply get the body and we are done
    if not code then
        h:receive09body(status, nreqt.sink, nreqt.step)
        return 1, 200
    end
    local headers
    -- ignore any 100-continue messages
    while code == 100 do
        headers = h:receiveheaders()
        code, status = h:receivestatusline()
    end
    headers = h:receiveheaders()
    -- at this point we should have a honest reply from the server
    -- we can't redirect if we already used the source, so we report the error
    if shouldredirect(nreqt, code, headers) and not nreqt.source then
        h:close()
        return tredirect(reqt, headers.location)
    end
    -- here we are finally done
    if shouldreceivebody(nreqt, code) then
        h:receivebody(headers, nreqt.sink, nreqt.step)
    end
    h:close()
    return 1, code, headers, status
end
-- Simple string interface: GET url `u`, or POST body `b` as an
-- urlencoded form. Returns the accumulated body, code, headers, status.
local function srequest(u, b)
    local chunks = {}
    local reqt = {
        url = u,
        sink = ltn12.sink.table(chunks)
    }
    if b then
        reqt.source = ltn12.source.string(b)
        reqt.headers = {
            ["content-length"] = string.len(b),
            ["content-type"] = "application/x-www-form-urlencoded"
        }
        reqt.method = "POST"
    end
    local code, headers, status = socket.skip(1, trequest(reqt))
    return table.concat(chunks), code, headers, status
end
-- Public entry point: string form (simple) or table form (generic).
_M.request = socket.protect(function(reqt, body)
    if base.type(reqt) ~= "string" then return trequest(reqt) end
    return srequest(reqt, body)
end)
return _M

View File

@ -0,0 +1,256 @@
-----------------------------------------------------------------------------
-- SMTP client support for the Lua language.
-- LuaSocket toolkit.
-- Author: Diego Nehab
-----------------------------------------------------------------------------
-----------------------------------------------------------------------------
-- Declare module and import dependencies
-----------------------------------------------------------------------------
local base = _G
local coroutine = require("coroutine")
local string = require("string")
local math = require("math")
local os = require("os")
local socket = require("socket")
local tp = require("socket.tp")
local ltn12 = require("ltn12")
local headers = require("socket.headers")
local mime = require("mime")
socket.smtp = {}
local _M = socket.smtp
-----------------------------------------------------------------------------
-- Program constants
-----------------------------------------------------------------------------
-- timeout for connection, in seconds
_M.TIMEOUT = 60
-- default server used to send e-mails
_M.SERVER = "localhost"
-- default port (SMTP)
_M.PORT = 25
-- domain used in HELO command and default sendmail
-- If we are under a CGI, try to get from environment
_M.DOMAIN = os.getenv("SERVER_NAME") or "localhost"
-- default time zone (means we don't know)
_M.ZONE = "-0000"
---------------------------------------------------------------------------
-- Low level SMTP API
-----------------------------------------------------------------------------
-- metatable for SMTP connection objects
local metat = { __index = {} }

-- Consume the server banner (2xx), send EHLO, and return the reply
-- text, which lists the server's supported extensions.
function metat.__index:greet(domain)
    self.try(self.tp:check("2.."))
    self.try(self.tp:command("EHLO", domain or _M.DOMAIN))
    return socket.skip(1, self.try(self.tp:check("2..")))
end
-- Send "MAIL FROM:" .. from and expect a 2xx reply.
function metat.__index:mail(from)
    local try, tp = self.try, self.tp
    try(tp:command("MAIL", "FROM:" .. from))
    return try(tp:check("2.."))
end

-- Send "RCPT TO:" .. to and expect a 2xx reply.
function metat.__index:rcpt(to)
    local try, tp = self.try, self.tp
    try(tp:command("RCPT", "TO:" .. to))
    return try(tp:check("2.."))
end
-- Send the message body: DATA, expect 3xx, pump `src`, terminate with
-- CRLF "." CRLF, and expect a final 2xx.
function metat.__index:data(src, step)
    self.try(self.tp:command("DATA"))
    self.try(self.tp:check("3.."))
    self.try(self.tp:source(src, step))
    self.try(self.tp:send("\r\n.\r\n"))
    return self.try(self.tp:check("2.."))
end
-- Send QUIT and expect a 2xx reply.
function metat.__index:quit()
    self.try(self.tp:command("QUIT"))
    return self.try(self.tp:check("2.."))
end
-- Close the underlying tp connection without protocol goodbye.
function metat.__index:close()
    return self.tp:close()
end
-- AUTH LOGIN: user and password are sent base64-encoded, each after a
-- 3xx continuation reply.
function metat.__index:login(user, password)
    self.try(self.tp:command("AUTH", "LOGIN"))
    self.try(self.tp:check("3.."))
    self.try(self.tp:send(mime.b64(user) .. "\r\n"))
    self.try(self.tp:check("3.."))
    self.try(self.tp:send(mime.b64(password) .. "\r\n"))
    return self.try(self.tp:check("2.."))
end
-- AUTH PLAIN: single base64 blob of "\0user\0password".
function metat.__index:plain(user, password)
    local auth = "PLAIN " .. mime.b64("\0" .. user .. "\0" .. password)
    self.try(self.tp:command("AUTH", auth))
    return self.try(self.tp:check("2.."))
end
-- Authenticate when credentials were supplied, choosing LOGIN or PLAIN
-- from the extension list `ext` returned by the EHLO greeting.
function metat.__index:auth(user, password, ext)
    if not (user and password) then return 1 end
    if string.find(ext, "AUTH[^\n]+LOGIN") then
        return self:login(user, password)
    end
    if string.find(ext, "AUTH[^\n]+PLAIN") then
        return self:plain(user, password)
    end
    self.try(nil, "authentication not supported")
end
-- send message or throw an exception: announce sender, one or many
-- recipients, then the dot-stuffed body.
function metat.__index:send(mailt)
    self:mail(mailt.from)
    if base.type(mailt.rcpt) == "table" then
        for i,v in base.ipairs(mailt.rcpt) do
            self:rcpt(v)
        end
    else
        self:rcpt(mailt.rcpt)
    end
    -- mime.stuff() applies SMTP dot-stuffing to the body stream
    self:data(ltn12.source.chain(mailt.source, mime.stuff()), mailt.step)
end
-- Connect to the SMTP server and return a connection object whose
-- `try` closes the connection on any protocol exception.
function _M.open(server, port, create)
    -- note: the local `tp` shadows the module-level require("socket.tp")
    local tp = socket.try(tp.connect(server or _M.SERVER, port or _M.PORT,
        _M.TIMEOUT, create))
    local s = base.setmetatable({tp = tp}, metat)
    -- make sure tp is closed if we get an exception
    s.try = socket.newtry(function()
        s:close()
    end)
    return s
end
-- Return a copy of `headers` with every field name lower-cased.
local function lower_headers(headers)
    local lowered = {}
    for name, value in base.pairs(headers or lowered) do
        lowered[string.lower(name)] = value
    end
    return lowered
end
---------------------------------------------------------------------------
-- Multipart message source
-----------------------------------------------------------------------------
-- returns a hopefully unique mime boundary
local seqno = 0
local function newboundary()
    seqno = seqno + 1
    -- timestamp + random number + monotonically increasing sequence
    return string.format('%s%05d==%05u', os.date('%d%m%Y%H%M%S'),
        math.random(0, 99999), seqno)
end
-- send_message forward declaration (recurses via send_multipart)
local send_message
-- yield the headers all at once, it's faster; the blank separator line
-- is kept at the end of the accumulated string
local function send_headers(tosend)
    local canonic = headers.canonic
    local h = "\r\n"
    for f,v in base.pairs(tosend) do
        h = (canonic[f] or f) .. ': ' .. v .. "\r\n" .. h
    end
    coroutine.yield(h)
end
-- yield multipart message body from a multipart message table:
-- headers with a boundary-tagged content-type, optional preamble,
-- each part recursively, the closing boundary, optional epilogue
local function send_multipart(mesgt)
    -- make sure we have our boundary and send headers
    local bd = newboundary()
    local headers = lower_headers(mesgt.headers or {})
    headers['content-type'] = headers['content-type'] or 'multipart/mixed'
    headers['content-type'] = headers['content-type'] ..
        '; boundary="' ..  bd .. '"'
    send_headers(headers)
    -- send preamble
    if mesgt.body.preamble then
        coroutine.yield(mesgt.body.preamble)
        coroutine.yield("\r\n")
    end
    -- send each part separated by a boundary
    for i, m in base.ipairs(mesgt.body) do
        coroutine.yield("\r\n--" .. bd .. "\r\n")
        send_message(m)
    end
    -- send last boundary
    coroutine.yield("\r\n--" .. bd .. "--\r\n\r\n")
    -- send epilogue
    if mesgt.body.epilogue then
        coroutine.yield(mesgt.body.epilogue)
        coroutine.yield("\r\n")
    end
end
-- yield message body from a source (mesgt.body is an LTN12 source
-- function that returns chunks until nil)
local function send_source(mesgt)
    -- make sure we have a content-type
    local headers = lower_headers(mesgt.headers or {})
    headers['content-type'] = headers['content-type'] or
        'text/plain; charset="iso-8859-1"'
    send_headers(headers)
    -- send body from source
    while true do
        local chunk, err = mesgt.body()
        if err then coroutine.yield(nil, err)
        elseif chunk then coroutine.yield(chunk)
        else break end
    end
end
-- yield message body from a string
local function send_string(mesgt)
    -- make sure we have a content-type
    local headers = lower_headers(mesgt.headers or {})
    headers['content-type'] = headers['content-type'] or
        'text/plain; charset="iso-8859-1"'
    send_headers(headers)
    -- send body from string
    coroutine.yield(mesgt.body)
end
-- message source: dispatch on the type of mesgt.body
function send_message(mesgt)
    local kind = base.type(mesgt.body)
    if kind == "table" then
        send_multipart(mesgt)
    elseif kind == "function" then
        send_source(mesgt)
    else
        send_string(mesgt)
    end
end
-- set default headers (date, x-mailer) and force mime-version
local function adjust_headers(mesgt)
    local lower = lower_headers(mesgt.headers)
    lower["date"] = lower["date"] or
        os.date("!%a, %d %b %Y %H:%M:%S ") .. (mesgt.zone or _M.ZONE)
    lower["x-mailer"] = lower["x-mailer"] or socket._VERSION
    -- this cannot be overridden
    lower["mime-version"] = "1.0"
    return lower
end
-- Turn a message table into an LTN12 source: a coroutine yields the
-- formatted message piece by piece; errors resume as nil, err.
function _M.message(mesgt)
    mesgt.headers = adjust_headers(mesgt)
    -- create and return message source
    local co = coroutine.create(function() send_message(mesgt) end)
    return function()
        local ret, a, b = coroutine.resume(co)
        if ret then return a, b
        else return nil, a end
    end
end
---------------------------------------------------------------------------
-- High level SMTP API
-----------------------------------------------------------------------------
-- High level send: connect, greet, authenticate if credentials were
-- given, deliver the envelope and body, then quit and close.
_M.send = socket.protect(function(mailt)
    local s = _M.open(mailt.server, mailt.port, mailt.create)
    local ext = s:greet(mailt.domain)
    s:auth(mailt.user, mailt.password, ext)
    s:send(mailt)
    s:quit()
    return s:close()
end)
return _M

View File

@ -0,0 +1,126 @@
-----------------------------------------------------------------------------
-- Unified SMTP/FTP subsystem
-- LuaSocket toolkit.
-- Author: Diego Nehab
-----------------------------------------------------------------------------
-----------------------------------------------------------------------------
-- Declare module and import dependencies
-----------------------------------------------------------------------------
local base = _G
local string = require("string")
local socket = require("socket")
local ltn12 = require("ltn12")
socket.tp = {}
local _M = socket.tp
-----------------------------------------------------------------------------
-- Program constants
-----------------------------------------------------------------------------
-- default connection timeout in seconds (see _M.connect)
_M.TIMEOUT = 60
-----------------------------------------------------------------------------
-- Implementation
-----------------------------------------------------------------------------
-- gets server reply (works for SMTP and FTP); returns the 3-digit
-- code string and the full (possibly multiline) reply text
local function get_reply(c)
    local code, current, sep
    local line, err = c:receive()
    local reply = line
    if err then return nil, err end
    code, sep = socket.skip(2, string.find(line, "^(%d%d%d)(.?)"))
    if not code then return nil, "invalid server reply" end
    if sep == "-" then -- reply is multiline
        repeat
            line, err = c:receive()
            if err then return nil, err end
            current, sep = socket.skip(2, string.find(line, "^(%d%d%d)(.?)"))
            reply = reply .. "\n" .. line
        -- reply ends with same code followed by a space
        until code == current and sep == " "
    end
    return code, reply
end
-- metatable for sock object
local metat = { __index = {} }

-- Read a reply and match its code against `ok`, which may be a Lua
-- pattern string, an array of patterns, or a function receiving
-- (code, reply). Returns the numeric code and reply, or nil, reply.
function metat.__index:check(ok)
    local code, reply = get_reply(self.c)
    if not code then return nil, reply end
    if base.type(ok) ~= "function" then
        if base.type(ok) == "table" then
            for i, v in base.ipairs(ok) do
                if string.find(code, v) then
                    return base.tonumber(code), reply
                end
            end
            return nil, reply
        else
            if string.find(code, ok) then return base.tonumber(code), reply
            else return nil, reply end
        end
    else return ok(base.tonumber(code), reply) end
end
-- Send `cmd` (upper-cased) with an optional argument, CRLF-terminated.
function metat.__index:command(cmd, arg)
    cmd = string.upper(cmd)
    local line
    if arg then line = cmd .. " " .. arg .. "\r\n"
    else line = cmd .. "\r\n" end
    return self.c:send(line)
end
-- Receive one `pat`'s worth of data and feed it to the sink `snk`.
-- BUG FIX: the original read from the undeclared global `c` instead of
-- self.c, so this method always failed at runtime.
function metat.__index:sink(snk, pat)
    local chunk, err = self.c:receive(pat)
    return snk(chunk, err)
end
-- Send raw data on the control connection.
function metat.__index:send(data)
    return self.c:send(data)
end
-- Receive raw data from the control connection.
function metat.__index:receive(pat)
    return self.c:receive(pat)
end
-- Delegate getfd() to the underlying socket (e.g. for select).
function metat.__index:getfd()
    return self.c:getfd()
end
-- Delegate dirty() to the underlying socket.
function metat.__index:dirty()
    return self.c:dirty()
end
-- Expose the raw control socket.
function metat.__index:getcontrol()
    return self.c
end
-- Pump an LTN12 `source` into the connection, leaving it open.
function metat.__index:source(source, step)
    local sink = socket.sink("keep-open", self.c)
    local ret, err = ltn12.pump.all(source, sink, step or ltn12.pump.step)
    return ret, err
end
-- closes the underlying c; always returns 1
function metat.__index:close()
    self.c:close()
    return 1
end
-- connect with server and return c object; on failure returns
-- nil plus the error, closing any half-created socket
function _M.connect(host, port, timeout, create)
    local sock, err = (create or socket.tcp)()
    if not sock then return nil, err end
    -- apply the timeout before connecting so connect can time out too
    sock:settimeout(timeout or _M.TIMEOUT)
    local ok, cerr = sock:connect(host, port)
    if not ok then
        sock:close()
        return nil, cerr
    end
    return base.setmetatable({c = sock}, metat)
end
return _M

View File

@ -0,0 +1,307 @@
-----------------------------------------------------------------------------
-- URI parsing, composition and relative URL resolution
-- LuaSocket toolkit.
-- Author: Diego Nehab
-----------------------------------------------------------------------------
-----------------------------------------------------------------------------
-- Declare module
-----------------------------------------------------------------------------
local string = require("string")
local base = _G
local table = require("table")
local socket = require("socket")
socket.url = {}
local _M = socket.url
-----------------------------------------------------------------------------
-- Module version
-----------------------------------------------------------------------------
_M._VERSION = "URL 1.0.3"
-----------------------------------------------------------------------------
-- Encodes a string into its escaped hexadecimal representation
-- Input
-- s: binary string to be encoded
-- Returns
-- escaped representation of string binary
-----------------------------------------------------------------------------
-- Percent-encode every byte that is not alphanumeric or underscore.
function _M.escape(s)
    local function hexify(c)
        return string.format("%%%02x", string.byte(c))
    end
    return (string.gsub(s, "([^A-Za-z0-9_])", hexify))
end
-----------------------------------------------------------------------------
-- Protects a path segment, to prevent it from interfering with the
-- url parsing.
-- Input
-- s: binary string to be encoded
-- Returns
-- escaped representation of string binary
-----------------------------------------------------------------------------
-- Build a lookup set (value -> 1) from an array of strings.
local function make_set(t)
    local set = {}
    for _, c in base.ipairs(t) do
        set[c] = 1
    end
    return set
end
-- these are allowed within a path segment, along with alphanum;
-- other characters must be escaped
local segment_set = make_set {
    "-", "_", ".", "!", "~", "*", "'", "(",
    ")", ":", "@", "&", "=", "+", "$", ",",
}
-- Percent-encode characters not allowed in a path segment.
local function protect_segment(s)
    return string.gsub(s, "([^A-Za-z0-9_])", function (c)
        if segment_set[c] then return c
        else return string.format("%%%02x", string.byte(c)) end
    end)
end
-----------------------------------------------------------------------------
-- Encodes a string into its escaped hexadecimal representation
-- Input
-- s: binary string to be encoded
-- Returns
-- escaped representation of string binary
-----------------------------------------------------------------------------
-- Decode %XX hexadecimal escapes back into raw bytes.
function _M.unescape(s)
    local function unhex(hex)
        return string.char(base.tonumber(hex, 16))
    end
    return (string.gsub(s, "%%(%x%x)", unhex))
end
-----------------------------------------------------------------------------
-- Builds a path from a base path and a relative path
-- Input
-- base_path
-- relative_path
-- Returns
-- corresponding absolute path
-----------------------------------------------------------------------------
-- Merge relative_path onto base_path and collapse "." and ".."
-- segments, as in RFC 2396 relative resolution.
local function absolute_path(base_path, relative_path)
    -- an absolute relative_path replaces the base outright
    if string.sub(relative_path, 1, 1) == "/" then return relative_path end
    -- drop the last segment of the base, then append
    local path = string.gsub(base_path, "[^/]*$", "") .. relative_path
    -- remove "./" segments (keep longer segments merely ending in ".")
    path = string.gsub(path, "([^/]*%./)", function (s)
        if s ~= "./" then return s else return "" end
    end)
    path = string.gsub(path, "/%.$", "/")
    -- repeatedly collapse "segment/../" pairs until stable
    local reduced
    while reduced ~= path do
        reduced = path
        path = string.gsub(reduced, "([^/]*/%.%./)", function (s)
            if s ~= "../../" then return "" else return s end
        end)
    end
    -- finally collapse a trailing "segment/.."
    path = string.gsub(reduced, "([^/]*/%.%.)$", function (s)
        if s ~= "../.." then return "" else return s end
    end)
    return path
end
-----------------------------------------------------------------------------
-- Parses a url and returns a table with all its parts according to RFC 2396
-- The following grammar describes the names given to the URL parts
-- <url> ::= <scheme>://<authority>/<path>;<params>?<query>#<fragment>
-- <authority> ::= <userinfo>@<host>:<port>
-- <userinfo> ::= <user>[:<password>]
-- <path> :: = {<segment>/}<segment>
-- Input
-- url: uniform resource locator of request
-- default: table with default values for each field
-- Returns
-- table with the following fields, where RFC naming conventions have
-- been preserved:
-- scheme, authority, userinfo, user, password, host, port,
-- path, params, query, fragment
-- Obs:
-- the leading '/' in {/<path>} is considered part of <path>
-----------------------------------------------------------------------------
-- Parse `url` into its RFC 2396 components, seeded with `default`.
-- Each gsub below strips one component off the string in a fixed
-- order (fragment, scheme, authority, query, params); the remainder
-- is the path. The authority is then split into userinfo/host/port.
function _M.parse(url, default)
    -- initialize default parameters
    local parsed = {}
    for i,v in base.pairs(default or parsed) do parsed[i] = v end
    -- empty url is parsed to nil
    if not url or url == "" then return nil, "invalid url" end
    -- remove whitespace
    -- url = string.gsub(url, "%s", "")
    -- get fragment
    url = string.gsub(url, "#(.*)$", function(f)
        parsed.fragment = f
        return ""
    end)
    -- get scheme
    url = string.gsub(url, "^([%w][%w%+%-%.]*)%:",
        function(s) parsed.scheme = s; return "" end)
    -- get authority
    url = string.gsub(url, "^//([^/]*)", function(n)
        parsed.authority = n
        return ""
    end)
    -- get query string
    url = string.gsub(url, "%?(.*)", function(q)
        parsed.query = q
        return ""
    end)
    -- get params
    url = string.gsub(url, "%;(.*)", function(p)
        parsed.params = p
        return ""
    end)
    -- path is whatever was left
    if url ~= "" then parsed.path = url end
    local authority = parsed.authority
    if not authority then return parsed end
    authority = string.gsub(authority,"^([^@]*)@",
        function(u) parsed.userinfo = u; return "" end)
    -- port is the last ":"-suffix not inside an IPv6 bracket
    authority = string.gsub(authority, ":([^:%]]*)$",
        function(p) parsed.port = p; return "" end)
    if authority ~= "" then
        -- IPv6?
        parsed.host = string.match(authority, "^%[(.+)%]$") or authority
    end
    local userinfo = parsed.userinfo
    if not userinfo then return parsed end
    -- split "user:password"
    userinfo = string.gsub(userinfo, ":([^:]*)$",
        function(p) parsed.password = p; return "" end)
    parsed.user = userinfo
    return parsed
end
-----------------------------------------------------------------------------
-- Rebuilds a parsed URL from its components.
-- Components are protected if any reserved or unallowed characters are found
-- Input
-- parsed: parsed URL, as returned by parse
-- Returns
-- a stringing with the corresponding URL
-----------------------------------------------------------------------------
-- Rebuild a URL string from a parsed-components table, escaping path
-- segments and bracketing IPv6 hosts.
function _M.build(parsed)
    -- round-trip the path through parse_path/build_path to escape it
    local ppath = _M.parse_path(parsed.path or "")
    local url = _M.build_path(ppath)
    if parsed.params then url = url .. ";" .. parsed.params end
    if parsed.query then url = url .. "?" .. parsed.query end
    local authority = parsed.authority
    if parsed.host then
        authority = parsed.host
        if string.find(authority, ":") then -- IPv6?
            authority = "[" .. authority .. "]"
        end
        if parsed.port then authority = authority .. ":" .. parsed.port end
        local userinfo = parsed.userinfo
        if parsed.user then
            userinfo = parsed.user
            if parsed.password then
                userinfo = userinfo .. ":" .. parsed.password
            end
        end
        if userinfo then authority = userinfo .. "@" .. authority end
    end
    if authority then url = "//" .. authority .. url end
    if parsed.scheme then url = parsed.scheme .. ":" .. url end
    if parsed.fragment then url = url .. "#" .. parsed.fragment end
    -- url = string.gsub(url, "%s", "")
    return url
end
-----------------------------------------------------------------------------
-- Builds a absolute URL from a base and a relative URL according to RFC 2396
-- Input
-- base_url
-- relative_url
-- Returns
-- corresponding absolute url
-----------------------------------------------------------------------------
-----------------------------------------------------------------------------
-- Builds an absolute URL from a base and a relative URL (RFC 2396).
-- base_url: absolute base URL (string, or an already-parsed table)
-- relative_url: relative URL string
-- Returns the corresponding absolute URL string.
-----------------------------------------------------------------------------
function _M.absolute(base_url, relative_url)
  -- FIX: declare base_parsed as a local.  The original assigned it without
  -- 'local', leaking a global into the caller's environment on every call.
  local base_parsed
  if base.type(base_url) == "table" then
    base_parsed = base_url
    base_url = _M.build(base_parsed)
  else
    base_parsed = _M.parse(base_url)
  end
  local relative_parsed = _M.parse(relative_url)
  if not base_parsed then return relative_url
  elseif not relative_parsed then return base_url
  elseif relative_parsed.scheme then return relative_url
  else
    -- Inherit missing components from the base, in RFC 2396 order:
    -- scheme, then authority, then path, then params, then query.
    relative_parsed.scheme = base_parsed.scheme
    if not relative_parsed.authority then
      relative_parsed.authority = base_parsed.authority
      if not relative_parsed.path then
        relative_parsed.path = base_parsed.path
        if not relative_parsed.params then
          relative_parsed.params = base_parsed.params
          if not relative_parsed.query then
            relative_parsed.query = base_parsed.query
          end
        end
      else
        -- Relative path: merge it against the base path.
        relative_parsed.path = absolute_path(base_parsed.path or "",
          relative_parsed.path)
      end
    end
    return _M.build(relative_parsed)
  end
end
-----------------------------------------------------------------------------
-- Breaks a path into its segments, unescaping the segments
-- Input
-- path
-- Returns
-- segment: a table with one entry per segment
-----------------------------------------------------------------------------
-- Splits a path into a table of its unescaped segments.  The flags
-- is_absolute / is_directory record a leading / trailing '/'.
function _M.parse_path(path)
  path = path or ""
  local segments = {}
  for segment in string.gmatch(path, "[^/]+") do
    segments[#segments + 1] = _M.unescape(segment)
  end
  if string.sub(path, 1, 1) == "/" then segments.is_absolute = 1 end
  if string.sub(path, -1, -1) == "/" then segments.is_directory = 1 end
  return segments
end
-----------------------------------------------------------------------------
-- Builds a path component from its segments, escaping protected characters.
-- Input
-- parsed: path segments
-- unsafe: if true, segments are not protected before path is built
-- Returns
-- path: corresponding path string
-----------------------------------------------------------------------------
-- Joins path segments back into a path string.  Unless 'unsafe' is set,
-- every segment is escaped with protect_segment before joining.
function _M.build_path(parsed, unsafe)
  local n = #parsed
  local parts = {}
  for i = 1, n do
    if unsafe then
      parts[i] = parsed[i]
    else
      parts[i] = protect_segment(parsed[i])
    end
  end
  local path = table.concat(parts, "/")
  -- A trailing '/' is only re-added when there was at least one segment.
  if n > 0 and parsed.is_directory then path = path .. "/" end
  if parsed.is_absolute then path = "/" .. path end
  return path
end
return _M

View File

@ -0,0 +1,93 @@
------------------------------------------------------------------------------
-- LuaSec 0.4.1
-- Copyright (C) 2006-2011 Bruno Silvestre
--
------------------------------------------------------------------------------
module("ssl", package.seeall)
require("ssl.core")
require("ssl.context")
_VERSION = "0.4.1"
_COPYRIGHT = "LuaSec 0.4.1 - Copyright (C) 2006-2011 Bruno Silvestre\n" ..
"LuaSocket 2.0.2 - Copyright (C) 2004-2007 Diego Nehab"
-- Export functions
rawconnection = core.rawconnection
rawcontext = context.rawcontext
--
--
--
-- Applies an optional configuration setter: a nil param is simply skipped
-- (reported as success), a table param is unpacked into extra arguments,
-- and any other value is passed through unchanged.
local function optexec(func, param, ctx)
  if not param then return true end
  if type(param) == "table" then
    return func(ctx, unpack(param))
  end
  return func(ctx, param)
end
--
--
--
-- Builds an SSL context from a configuration table (protocol, mode, key,
-- certificate, CA file/path, verify and options flags, verification depth).
-- Returns the context on success, or nil plus an error message.
function newcontext(cfg)
  -- Create the raw context for the requested protocol.
  local ctx, err = context.create(cfg.protocol)
  if not ctx then return nil, err end
  local ok
  -- Client or server mode.
  ok, err = context.setmode(ctx, cfg.mode)
  if not ok then return nil, err end
  -- Private key (optionally password protected).
  if cfg.key then
    ok, err = context.loadkey(ctx, cfg.key, cfg.password)
    if not ok then return nil, err end
  end
  -- Certificate chain.
  if cfg.certificate then
    ok, err = context.loadcert(ctx, cfg.certificate)
    if not ok then return nil, err end
  end
  -- Trusted CA certificates.
  if cfg.cafile or cfg.capath then
    ok, err = context.locations(ctx, cfg.cafile, cfg.capath)
    if not ok then return nil, err end
  end
  -- Optional verification and SSL option flags.
  ok, err = optexec(context.setverify, cfg.verify, ctx)
  if not ok then return nil, err end
  ok, err = optexec(context.setoptions, cfg.options, ctx)
  if not ok then return nil, err end
  -- Maximum certificate-chain verification depth.
  if cfg.depth then
    ok, err = context.setdepth(ctx, cfg.depth)
    if not ok then return nil, err end
  end
  return ctx
end
--
--
--
-- Wraps an existing TCP socket in an SSL connection object.  cfg may be a
-- configuration table (a fresh context is built) or a ready-made context.
-- On success the file descriptor is moved from the plain socket into the
-- SSL object and the plain socket is invalidated.
function wrap(sock, cfg)
  local ctx = cfg
  if type(cfg) == "table" then
    local err
    ctx, err = newcontext(cfg)
    if not ctx then return nil, err end
  end
  local conn, err = core.create(ctx)
  if not conn then return nil, err end
  core.setfd(conn, sock:getfd())
  sock:setfd(core.invalidfd)
  return conn
end

View File

@ -0,0 +1,138 @@
----------------------------------------------------------------------------
-- LuaSec 0.4.1
-- Copyright (C) 2009-2011 PUC-Rio
--
-- Author: Pablo Musa
-- Author: Tomas Guisasola
---------------------------------------------------------------------------
local socket = require("socket")
local ssl = require("ssl")
local ltn12 = require("ltn12")
local http = require("socket.http")
local url = require("socket.url")
local table = require("table")
local string = require("string")
local try = socket.try
local type = type
local pairs = pairs
local getmetatable = getmetatable
module("ssl.https")
_VERSION = "0.4.1"
_COPYRIGHT = "LuaSec 0.4.1 - Copyright (C) 2009-2011 PUC-Rio"
-- Default settings
PORT = 443
local cfg = {
protocol = "tlsv1",
options = "all",
verify = "none",
}
--------------------------------------------------------------------
-- Auxiliary Functions
--------------------------------------------------------------------
-- Insert default HTTPS port.
-- Re-parses and rebuilds the URL so that a missing port defaults to PORT.
local function default_https_port(u)
  local parsed = url.parse(u, {port = PORT})
  return url.build(parsed)
end
-- Convert an URL to a table according to Luasocket needs.
-- Builds a LuaSocket-style request table from a plain URL string.  With a
-- body the request becomes a form-encoded POST; otherwise a GET.  The
-- response body is collected into result_table.
local function urlstring_totable(url, body, result_table)
  local req = {
    url = default_https_port(url),
    method = body and "POST" or "GET",
    sink = ltn12.sink.table(result_table),
  }
  if body then
    req.source = ltn12.source.string(body)
    req.headers = {
      ["content-length"] = #body,
      ["content-type"] = "application/x-www-form-urlencoded",
    }
  end
  return req
end
-- Forward calls to the real connection object.
-- Copies every method of the wrapped socket onto conn, so that method
-- calls on conn are forwarded to the underlying conn.sock object.
local function reg(conn)
  local methods = getmetatable(conn.sock).__index
  for name, fn in pairs(methods) do
    if type(fn) == "function" then
      conn[name] = function(self, ...)
        return fn(self.sock, ...)
      end
    end
  end
end
-- Return a function which performs the SSL/TLS connection.
-- Returns a 'create' function for LuaSocket that yields objects behaving
-- like TCP sockets but switching to SSL/TLS during connect().
local function tcp(params)
  params = params or {}
  -- Fill any missing settings from the module defaults.
  for k, v in pairs(cfg) do
    params[k] = params[k] or v
  end
  -- Only outgoing (client) connections are wrapped here.
  params.mode = "client"
  -- The returned closure is used by socket.http as its 'create' hook.
  return function()
    local conn = {}
    conn.sock = try(socket.tcp())
    local plain_settimeout = getmetatable(conn.sock).__index.settimeout
    -- settimeout must keep working both before and after the SSL wrap.
    function conn:settimeout(...)
      return plain_settimeout(self.sock, ...)
    end
    -- Connect, wrap the plain socket in SSL, run the handshake, then
    -- expose the SSL object's methods on conn.
    function conn:connect(host, port)
      try(self.sock:connect(host, port))
      self.sock = try(ssl.wrap(self.sock, params))
      try(self.sock:dohandshake())
      reg(self)
      return 1
    end
    return conn
  end
end
--------------------------------------------------------------------
-- Main Function
--------------------------------------------------------------------
-- Make a HTTP request over secure connection. This function receives
-- the same parameters of LuaSocket's HTTP module (except 'proxy' and
-- 'redirect') plus LuaSec parameters.
--
-- @param url mandatory (string or table)
-- @param body optional (string)
-- @return (string if url == string or 1), code, headers, status
--
-- Performs an HTTP request over TLS.  Accepts the same arguments as
-- LuaSocket's http.request (string or table form), except that 'proxy',
-- 'redirect' and a caller-supplied 'create' are rejected.
-- @return (body string if called with a string URL, otherwise 1),
--         code, headers, status
function request(url, body)
  local result_table = {}
  local wants_string = (type(url) == "string")
  if wants_string then
    url = urlstring_totable(url, body, result_table)
  else
    url.url = default_https_port(url.url)
  end
  -- Features not supported over the secure transport.
  if http.PROXY or url.proxy then
    return nil, "proxy not supported"
  elseif url.redirect then
    return nil, "redirect not supported"
  elseif url.create then
    return nil, "create function not permitted"
  end
  -- Install our own create function to open the TLS connection.
  url.create = tcp(url)
  local res, code, headers, status = http.request(url)
  if res and wants_string then
    return table.concat(result_table), code, headers, status
  end
  return res, code, headers, status
end

View File

@ -0,0 +1,315 @@
--
-- Copyright (C) 2012 Paul Kulchenko
-- A simple testing library
-- Based on lua-TestMore : <http://fperrad.github.com/lua-TestMore/>
-- Copyright (c) 2009-2011 Francois Perrad
-- This library is licensed under the terms of the MIT/X11 license,
-- like Lua itself.
--
local pairs = pairs
local tostring = tostring
local type = type
local _G = _G or _ENV
-----------------------------------------------------------
-- Minimal TAP-style test builder: tracks counts and prints "ok"/"not ok"
-- lines.  pass == nil marks a test as skipped.
local tb = {
  curr_test = 0,   -- tests run so far
  good_test = 0,   -- tests that passed
  skip_test = 0,   -- tests that were skipped (result == nil)
}

-- Output one line; override to redirect test output.
function tb:print(...)
  print(...)
end

-- Emit an informational note (same channel as print).
function tb:note(...)
  self:print(...)
end

-- Print a diagnostic message, prefixing every line with "# ".
function tb:diag(...)
  local pieces = {...}
  for i, piece in pairs(pieces) do
    pieces[i] = tostring(piece)
  end
  local text = table.concat(pieces)
  text = text:gsub("\n", "\n# ")
  text = text:gsub("\n# \n", "\n#\n")
  text = text:gsub("\n# $", '')
  self:print("# " .. text)
end

-- Record one result and print its TAP line.  'more' is extra diagnostic
-- text shown only on failure.
function tb:ok(test, name, more)
  self.curr_test = self.curr_test + 1
  if test then self.good_test = self.good_test + 1 end
  if test == nil then self.skip_test = self.skip_test + 1 end
  name = tostring(name or '')
  local line = (test and "" or "not ") .. "ok " .. self.curr_test
  if name ~= '' then
    line = line .. " - " .. name
  end
  self:print(line)
  -- Explicit failure (false, not nil): print where it happened and why.
  if test == false then
    self:diag("    Failed test " .. ("'" .. name .. "'"))
    if debug then
      local info = debug.getinfo(3)
      self:diag("    in " .. info.short_src .. " at line " .. info.currentline .. ".")
    end
    self:diag(more)
  end
end

-- Returns (total, passed, skipped); optionally resets the counters.
function tb:done_testing(reset)
  local total, good, skipped = self.curr_test, self.good_test, self.skip_test
  if reset then
    self.curr_test, self.good_test, self.skip_test = 0, 0, 0
  end
  return total, good, skipped
end
-----------------------------------------------------------
-- Embedded copy of the Serpent serializer (v0.15), used to turn arbitrary
-- Lua values into comparable/printable strings for the test helpers below.
local serpent = (function() ---- include Serpent module for serialization
local n, v = "serpent", 0.15 -- (C) 2012 Paul Kulchenko; MIT License
local c, d = "Paul Kulchenko", "Serializer and pretty printer of Lua data types"
-- printable replacements for the non-finite numbers (inf, -inf, nan)
local snum = {[tostring(1/0)]='1/0 --[[math.huge]]',[tostring(-1/0)]='-1/0 --[[-math.huge]]',[tostring(0/0)]='0/0'}
-- value types that cannot be serialized at all
local badtype = {thread = true, userdata = true}
local keyword, globals, G = {}, {}, (_G or _ENV)
-- Lua keywords may not appear as bare table keys; collected for safename()
for _,k in ipairs({'and', 'break', 'do', 'else', 'elseif', 'end', 'false',
'for', 'function', 'goto', 'if', 'in', 'local', 'nil', 'not', 'or', 'repeat',
'return', 'then', 'true', 'until', 'while'}) do keyword[k] = true end
for k,v in pairs(G) do globals[v] = k end -- build func to name mapping
-- also map standard-library functions to their dotted names (e.g. table.insert)
for _,g in ipairs({'coroutine', 'debug', 'io', 'math', 'string', 'table', 'os'}) do
for k,v in pairs(G[g]) do globals[v] = g..'.'..k end end
-- Main serializer.  opts fields: name, indent, fatal, sparse, custom,
-- nohuge, compact, maxlevel, comment, sortkeys, ignore, nocode.
local function s(t, opts)
local name, indent, fatal = opts.name, opts.indent, opts.fatal
local sparse, custom, huge = opts.sparse, opts.custom, not opts.nohuge
local space, maxl = (opts.compact and '' or ' '), (opts.maxlevel or math.huge)
local comm = opts.comment and (tonumber(opts.comment) or math.huge)
-- seen: tables already emitted; sref: deferred self/shared references;
-- syms/symn: generated symbol names for shared or recursive values
local seen, sref, syms, symn = {}, {}, {}, 0
-- derive a stable identifier from a value's tostring() representation
local function gensym(val) return tostring(val):gsub("[^%w]",""):gsub("(%d%w+)",
function(s) if not syms[s] then symn = symn+1; syms[s] = symn end return syms[s] end) end
-- quote a scalar value; non-finite numbers use their snum form
local function safestr(s) return type(s) == "number" and (huge and snum[tostring(s)] or s)
or type(s) ~= "string" and tostring(s) -- escape NEWLINE/010 and EOF/026
or ("%q"):format(s):gsub("\010","n"):gsub("\026","\\026") end
local function comment(s,l) return comm and (l or 0) < comm and ' --[['..tostring(s)..']]' or '' end
-- render an unserializable value: known global name, stringified form, or error
local function globerr(s,l) return globals[s] and globals[s]..comment(s,l) or not fatal
and safestr(tostring(s))..comment('err',l) or error("Can't serialize "..tostring(s)) end
local function safename(path, name) -- generates foo.bar, foo[3], or foo['b a r']
local n = name == nil and '' or name
local plain = type(n) == "string" and n:match("^[%l%u_][%w_]*$") and not keyword[n]
local safe = plain and n or '['..safestr(n)..']'
return (path or '')..(plain and path and '.' or '')..safe, safe end
-- key sorter: numbers first (zero-padded for numeric order), then strings
local alphanumsort = type(opts.sortkeys) == 'function' and opts.sortkeys or function(o, n)
local maxn, to = tonumber(n) or 12, {number = 'a', string = 'b'}
local function padnum(d) return ("%0"..maxn.."d"):format(d) end
table.sort(o, function(a,b)
return (o[a] and 0 or to[type(a)] or 'z')..(tostring(a):gsub("%d+",padnum))
< (o[b] and 0 or to[type(b)] or 'z')..(tostring(b):gsub("%d+",padnum)) end) end
-- recursively convert one value to its source-code form
local function val2str(t, name, indent, path, plainindex, level)
local ttype, level = type(t), (level or 0)
local spath, sname = safename(path, name)
local tag = plainindex and
((type(name) == "number") and '' or name..space..'='..space) or
(name ~= nil and sname..space..'='..space or '')
if seen[t] then
-- already emitted: record a deferred reference and emit nil here
table.insert(sref, spath..space..'='..space..seen[t])
return tag..'nil'..comment('ref', level)
elseif badtype[ttype] then return tag..globerr(t, level)
elseif ttype == 'function' then
seen[t] = spath
-- functions are serialized via string.dump when possible
local ok, res = pcall(string.dump, t)
local func = ok and ((opts.nocode and "function() end" or
"loadstring("..safestr(res)..",'@serialized')")..comment(t, level))
return tag..(func or globerr(t, level))
elseif ttype == "table" then
if level >= maxl then return tag..'{}'..comment('max', level) end
seen[t] = spath
if next(t) == nil then return tag..'{}'..comment(t, level) end -- table empty
local maxn, o, out = #t, {}, {}
for key = 1, maxn do table.insert(o, key) end
for key in pairs(t) do if not o[key] then table.insert(o, key) end end
if opts.sortkeys then alphanumsort(o, opts.sortkeys) end
for n, key in ipairs(o) do
local value, ktype, plainindex = t[key], type(key), n <= maxn and not sparse
if opts.ignore and opts.ignore[value] -- skip ignored values; do nothing
or sparse and value == nil then -- skipping nils; do nothing
elseif ktype == 'table' or ktype == 'function' then
-- table/function keys must be emitted as deferred assignments
if not seen[key] and not globals[key] then
table.insert(sref, 'local '..val2str(key,gensym(key),indent)) end
table.insert(sref, seen[t]..'['..(seen[key] or globals[key] or gensym(key))
..']'..space..'='..space..(seen[value] or val2str(value,nil,indent)))
else
if badtype[ktype] then plainindex, key = true, '['..globerr(key, level+1)..']' end
table.insert(out,val2str(value,key,indent,spath,plainindex,level+1))
end
end
local prefix = string.rep(indent or '', level)
local head = indent and '{\n'..prefix..indent or '{'
local body = table.concat(out, ','..(indent and '\n'..prefix..indent or space))
local tail = indent and "\n"..prefix..'}' or '}'
return (custom and custom(tag,head,body,tail) or tag..head..body..tail)..comment(t, level)
else return tag..safestr(t) end -- handle all other types
end
local sepr = indent and "\n" or ";"..space
local body = val2str(t, name, indent) -- this call also populates sref
local tail = #sref>0 and table.concat(sref, sepr)..sepr or ''
-- when the value has a name, wrap in do..end so deferred refs are emitted too
return not name and body or "do local "..body..sepr..tail.."return "..name..sepr.."end"
end
local function merge(a, b) if b then for k,v in pairs(b) do a[k] = v end end; return a; end
-- public interface: serialize plus three presets (dump / line / block)
return { _NAME = n, _COPYRIGHT = c, _DESCRIPTION = d, _VERSION = v, serialize = s,
dump = function(a, opts) return s(a, merge({name = '_', compact = true, sparse = true}, opts)) end,
line = function(a, opts) return s(a, merge({sortkeys = true, comment = true}, opts)) end,
block = function(a, opts) return s(a, merge({indent = ' ', sortkeys = true, comment = true}, opts)) end }
end)() ---- end of Serpent module
-----------------------------------------------------------
-- Public test API table; its members are injected into the global
-- environment at the bottom of this file.
local m = {}

-- Report a single boolean test result (thin wrapper over the builder).
m.ok = function(test, name)
  tb:ok(test, name)
end
-- Serialization options shared by the equality checks.
local parms = {comment = false}

-- Assert deep equality: both values are serialized and compared as strings.
-- A nil 'got' marks the test as skipped.
function m.is(got, expected, name)
  local vgot, vexp = serpent.line(got, parms), serpent.line(expected, parms)
  local pass = (vgot == vexp)
  if got == nil then pass = nil end
  local detail
  if not pass then
    detail = " got: " .. vgot .. " (" .. type(got) .. ")" ..
      "\n expected: " .. vexp .. " (" .. type(expected) .. ")"
  end
  tb:ok(pass, name, detail)
end
-- Assert deep inequality (serialized forms differ).  A nil 'got' skips.
function m.isnt(got, expected, name)
  local vgot, vexp = serpent.line(got, parms), serpent.line(expected, parms)
  local pass = (vgot ~= vexp)
  if got == nil then pass = nil end
  local detail
  if not pass then
    detail = " got: " .. vgot .. " (" .. type(got) .. ")" ..
      "\n expected: anything else"
  end
  tb:ok(pass, name, detail)
end
-- Assert that tostring(got) matches the given Lua pattern.
function m.like(got, pattern, name)
  if type(pattern) ~= 'string' then
    return tb:ok(false, name, "pattern isn't a string : " .. tostring(pattern))
  end
  local subject = tostring(got)
  local pass = subject:match(pattern)
  if got == nil then pass = nil end
  local detail
  if not pass then
    detail = " '" .. subject .. "'" .. "\n doesn't match '" .. pattern .. "'"
  end
  tb:ok(pass, name, detail)
end
-- Assert that tostring(got) does NOT match the given Lua pattern.
function m.unlike(got, pattern, name)
  if type(pattern) ~= 'string' then
    return tb:ok(false, name, "pattern isn't a string : " .. tostring(pattern))
  end
  local subject = tostring(got)
  local pass = not subject:match(pattern)
  if got == nil then pass = nil end
  local detail
  if not pass then
    detail = " '" .. subject .. "'" .. "\n matches '" .. pattern .. "'"
  end
  tb:ok(pass, name, detail)
end
-- Comparison operators accepted by cmp_ok, keyed by their source form.
local cmp = {
  ['<'] = function (a, b) return a < b end,
  ['<='] = function (a, b) return a <= b end,
  ['>'] = function (a, b) return a > b end,
  ['>='] = function (a, b) return a >= b end,
  ['=='] = function (a, b) return a == b end,
  ['~='] = function (a, b) return a ~= b end,
}

-- Assert that 'this op that' holds, where op is one of the keys above.
-- A nil 'this' marks the test as skipped.
function m.cmp_ok(this, op, that, name)
  local f = cmp[op]
  if not f then
    return tb:ok(false, name, "unknown operator : " .. tostring(op))
  end
  local pass = f(this, that)
  if this == nil then pass = nil end
  local detail
  if not pass then
    detail = " " .. tostring(this) ..
      "\n " .. op ..
      "\n " .. tostring(that)
  end
  tb:ok(pass, name, detail)
end
-- Assert that type(val) equals the type name t.
function m.type_ok(val, t, name)
  if type(t) ~= 'string' then
    return tb:ok(false, name, "type isn't a string : " .. tostring(t))
  end
  local actual = type(val)
  if actual == t then
    tb:ok(true, name)
  else
    tb:ok(false, name,
      " " .. tostring(val) .. " isn't a '" .. t .."', it's a '" .. actual .. "'")
  end
end
-- Forward a diagnostic message to the test builder.
m.diag = function(...)
  return tb:diag(...)
end
-- Print the final TAP plan/summary line ("1..N # ...") describing how many
-- tests passed, failed and were skipped, then reset the counters.
-- Does nothing when no tests have run.
function m.report()
local total, good, skipped = tb:done_testing(true)
if total == 0 then return end
local failed = total - good - skipped
-- summary suffix, e.g. "(good/skipped/total)."
local sum = ("(%d/%d/%d)."):format(good, skipped, total)
-- num tracks the last count mentioned (drives the "test"/"tests" plural);
-- msg accumulates the "passed X, failed Y and skipped Z" phrase with
-- separators chosen by which of the three categories are present.
local num, msg = 0, ""
if good > 0 then
num, msg = good, msg .. "passed " .. good
end
if failed > 0 then
num, msg = failed, msg .. (#msg > 0 and (skipped > 0 and ", " or " and ") or "")
.. "failed " .. failed
end
if skipped > 0 then
num, msg = skipped, msg .. (#msg > 0 and ((good > 0 and failed > 0 and ',' or '') .." and ") or "")
.. "skipped " .. skipped
end
msg = ("Looks like you %s test%s of %d %s"):format(msg, (num > 1 and 's' or ''), total, sum)
-- special-case wording when everything was skipped or everything passed
if skipped == total then msg = "Looks like you skipped all tests " .. sum end
if good == total then msg = "All tests passed " .. sum end
tb:note(("1..%d # %s"):format(total, msg))
end
-- Heuristic: returns true when the current script is being run directly
-- rather than loaded through require() (inspects up to 64 stack levels
-- looking for require in the call chain).
function m.ismain()
  for level = 3, 64 do
    local info = debug.getinfo(level)
    if not info then return true end
    if info.func == require then return false end
  end
  return true
end
-- this is needed to call report() when the test object is destroyed
-- (i.e. the summary is printed automatically at program exit if the
-- caller never invokes report() explicitly)
if _VERSION >= "Lua 5.2" then
-- Lua 5.2+ supports __gc metamethods directly on tables
setmetatable(m, {__gc = m.report})
else
-- keep sentinel alive until 'm' is garbage collected
-- Lua 5.1 tables have no __gc; newproxy() yields a userdata that does
m.sentinel = newproxy(true)
getmetatable(m.sentinel).__gc = m.report
end
-- expose the public API as globals so tests can call ok()/is()/like()
-- without a module prefix
for k, v in pairs(m) do -- injection
_G[k] = v
end
return m