elua: remove obsolete xgettext wrapper

This commit is contained in:
Daniel Kolesa 2015-07-01 14:49:44 +01:00
parent f4f808f25d
commit 87a88b5685
5 changed files with 1 additions and 1187 deletions

View File

@ -57,8 +57,7 @@ include Makefile_Elua_Helper.am
eluaappsdir = $(datadir)/elua/apps
eluaapps_DATA = \
scripts/elua/apps/lualian.lua \
scripts/elua/apps/xgettext.lua
scripts/elua/apps/lualian.lua
EXTRA_DIST += $(eluaapps_DATA)
@ -73,15 +72,6 @@ eluamodules_DATA = \
EXTRA_DIST += $(eluamodules_DATA)
eluaxgettextdir = $(eluamodulesdir)/xgettext
eluaxgettext_DATA = \
scripts/elua/modules/xgettext/lexer.lua \
scripts/elua/modules/xgettext/parser.lua \
scripts/elua/modules/xgettext/generator.lua
EXTRA_DIST += $(eluaxgettext_DATA)
eluaeinadir = $(eluamodulesdir)/eina
eluaeina_DATA = \

View File

@ -1,428 +0,0 @@
-- Xgettext application
-- provides a drop-in replacement of xgettext that supports Lua (but not any
-- other language)
local util = require("util")
local cutil = require("cutil")
local getopt = require("getopt")
local generator = require("xgettext.generator")
local VERSION = "1.0.0"
local input_sources = {}
local search_dirs = {}
local excluded_files = {}
local keywords = {}
local flags = { valid = {} }
local opts_final = {}
local opts_nolua = {}
local add_loc = true
-- Parse the command line. The option table mirrors GNU xgettext's
-- interface; entries with help = false are accepted but hidden, and
-- each entry routes its parsed form into opts_final (forwarded to the
-- final msgcat-style xgettext pass) or opts_nolua (forwarded to the
-- per-file xgettext pass for non-Lua inputs).
local opts, args = getopt.parse {
    usage = "Usage: %prog [OPTION] [INPUTFILE]...",
    args = arg,
    descs = {
        { category = "Input file location" },
        { metavar = "INPUTFILE ...", help = "input files" },
        { "f", "files-from", true, metavar = "FILE",
            help = "get list of input files from FILE", list = input_sources
        },
        { "D", "directory", true, help = "add DIRECTORY to list for input "
            .. "files search\nIf input file is -, standard input is read.",
            list = search_dirs, opts = opts_nolua
        },
        { category = "Output file location" },
        { "d", "default-domain", true, metavar = "NAME",
            help = "use NAME.po for output (instead of messages.po)",
            opts = opts_final
        },
        { "o", "output", true, metavar = "FILE",
            help = "write output to specified file", opts = opts_final
        },
        { "p", "output-dir", true, metavar = "DIR", help = "output files "
            .. "will be placed in directory DIR\nIf output file is -, "
            .. "output is written to standard output.",
            opts = opts_final
        },
        { category = "Input file interpretation" },
        { "L", "language", true, help = false },
        { "C", "c++", false, help = false,
            callback = function(desc, parser, val, opts) opts["L"] = "C++" end
        },
        { nil, "from-code", true, metavar = "NAME", help = "encoding of "
            .. "input files\nOnly relevant for non-Lua inputs "
            .. "(Lua is always assumed to be UTF-8).", opts = opts_nolua
        },
        { category = "Operation mode" },
        { "j", "join-existing", false,
            help = "join messages with existing file", opts = opts_final
        },
        { "x", "exclude-file", true, metavar = "FILE.po",
            help = "entries from FILE.po are not extracted",
            list = excluded_files
        },
        { "c", "add-comments", nil, metavar = "TAG", help = "place comment "
            .. "blocks (optionally starting with TAG) and preceding "
            .. "keyword lines in output file", opts = opts_nolua
        },
        { category = "Language specific options" },
        { "a", "extract-all", false, help = "extract all strings",
            opts = opts_nolua
        },
        { "k", "keyword", nil, metavar = "WORD", help = "look for WORD as an "
            .. "additional keyword or if not given, do no use default keywords",
            opts = opts_nolua, list = keywords
        },
        { nil, "flag", true, metavar = "WORD:ARG:FLAG", help = "additional "
            .. "flag for strings inside the argument number ARG of keyword WORD",
            opts = opts_nolua, list = flags
        },
        { "T", "trigraphs", false, help = false, opts = opts_nolua },
        { nil, "qt", false, help = false, opts = opts_nolua },
        { nil, "kde", false, help = false, opts = opts_nolua },
        { nil, "boost", false, help = false, opts = opts_nolua },
        { nil, "debug", false, help = "more detailed formatstring "
            .. "recognition results", opts = opts_nolua
        },
        { category = "Output details" },
        { nil, "color", nil, metavar = "WHEN", help = "use colors and other "
            .. "text attributes always or if WHEN. WHEN may be 'always', "
            .. "'never', 'auto', or 'html'", opts = opts_final
        },
        { nil, "style", true, metavar = "STYLEFILE", help = "specify CSS "
            .. "style rule file for --color", opts = opts_final
        },
        { nil, "force-po", false, help = "write PO file even if empty",
            opts = opts_final
        },
        { "i", "indent", false, help = "write the .po file using indented "
            .. "style", opts = opts_final
        },
        { nil, "no-location", false, help = "do not write '#: filename:line' "
            .. "lines", opts = opts_nolua,
            callback = function() add_loc = false end
        },
        { "n", "add-location", false, help = "generate '#: filename:line' "
            .. "lines (default)", opts = opts_nolua,
            callback = function() add_loc = true end
        },
        { nil, "strict", false, help = "write out strict Uniforum "
            .. "conforming .po file", opts = opts_final
        },
        { nil, "properties-output", false, help = "write out a Java "
            .. ".properties file", opts = opts_final
        },
        { nil, "stringtable-output", false, help = "write out a NeXTstep/"
            .. "GNUstep .strings file", opts = opts_final
        },
        { "w", "width", true, metavar = "NUMBER", help = "set output page "
            .. "width", opts = opts_final
        },
        -- BUG FIX: the original omitted the `help =` key on this entry
        -- (its description ended up as a positional field, unlike every
        -- other entry in this table)
        { nil, "no-wrap", false, help = "do not break long message lines, "
            .. "longer than the output page width, into several lines",
            opts = opts_final
        },
        { "s", "sort-output", false, help = "generate sorted output",
            opts = opts_final
        },
        { "F", "sort-by-file", false, help = "sort output by file location",
            opts = opts_final
        },
        { nil, "omit-header", false, help = "don't write header with "
            .. "'msgid \"\"' entry", opts = opts_final
        },
        { nil, "copyright-holder", true, metavar = "STRING", help = "set "
            .. "copyright holder in output", opts = opts_final
        },
        { nil, "foreign-user", false, help = "omit copyright in output "
            .. "for foreign user", opts = opts_final
        },
        { nil, "package-name", true, metavar = "PACKAGE", help = "set package "
            .. "name in output", opts = opts_final
        },
        { nil, "package-version", true, metavar = "VERSION", help = "set "
            .. "package version in output", opts = opts_final
        },
        { nil, "msgid-bugs-address", true, metavar = "EMAIL@ADDRESS", help =
            "set report address for msgid bugs", opts = opts_final
        },
        { "m", "msgstr-prefix", true, metavar = "STRING", help = "use STRING "
            .. "or \"\" as prefix for msgstr values", opts = opts_final
        },
        { "M", "msgstr-suffix", true, metavar = "STRING", help = "use STRING "
            .. "or \"\" as suffix for msgstr values", opts = opts_final
        },
        { category = "Binaries" },
        { "X", "xgettext", true, metavar = "PATH", help = "path to xgettext." },
        { category = "Informative output" },
        { "h", "help", nil, help = "display this help and exit",
            callback = getopt.help_cb(io.stdout)
        },
        { "v", "version", false, help = "output version information and exit",
            callback = function(p, d)
                print("elua-xgettext (EFL) " .. VERSION)
            end
        },
        error_cb = function(parser, msg)
            io.stderr:write(msg, "\n")
            getopt.help(parser, io.stderr)
        end,
        done_cb = function(parser, opts, args)
        end
    }
}
-- Bail out after --help/--version (the callbacks already produced the
-- output) or after a parse error (opts is nil).
if not opts or opts["h"] or opts["v"] then
    return true
end
-- default keywords
-- (applied only when no -k/--keyword options were given)
if #keywords == 0 then
    keywords = { "_", "gettext.gettext" , "gettext.dgettext:2",
                 "gettext.dcgettext:2" , "gettext.ngettext:1,2",
                 "gettext.dngettext:2,3", "gettext.dcngettext:2,3" }
end
-- transform some lists into mappings
-- (excluded_files becomes a set keyed by file name)
for i = 1, #excluded_files do
    excluded_files[excluded_files[i]] = true
    excluded_files[i] = nil
end
-- Parse each "word[:argspecs]" keyword into a lookup table entry.
-- The spec grammar follows xgettext's --keyword: up to three argument
-- numbers, each optionally suffixed with "c" (context argument), an
-- optional ,"comment" and an optional ,NUMt total-argument-count.
for i = 1, #keywords do
    local kw = keywords[i]
    local kwb, specs = kw:match("^(.+):(.+)$")
    local n1, n1c, n2, n2c, n3, n3c, xcmt, argnum
    if not kwb then
        -- bare keyword with no argument spec
        kwb = kw
    else
        -- try three, then two, then one argument number
        n1, n1c, n2, n2c, n3, n3c, xcmt
            = specs:match("^(%d+)(c?),(%d+)(c?),(%d+)(c?)(.*)$")
        if not n1 then
            n1, n1c, n2, n2c, xcmt = specs:match("^(%d+)(c?),(%d+)(c?)(.*)$")
            if not n1 then
                n1, n1c, xcmt = specs:match("^(%d+)(c?)(.*)$")
                if not n1 then error("invalid keyword specifier") end
            end
        end
    end
    -- all matched, sanitize now
    -- (empty captures mean "absent")
    if n1c == "" then n1c = nil end
    if n2c == "" then n2c = nil end
    if n3c == "" then n3c = nil end
    if xcmt == "" then xcmt = nil end
    -- sanitize/retrieve comment and potential total number of args
    -- (a trailing NUMt means the last number was the total arg count,
    -- not a message argument)
    if n3 and xcmt == "t" then
        if n3c then error("invalid keyword specifier") end
        argnum = n3
        n3 = nil
    elseif n2 and xcmt == "t" then
        if n2c then error("invalid keyword specifier") end
        argnum = n2
        n2 = nil
    elseif n1 and xcmt == "t" then
        if n1c then error("invalid keyword specifier") end
        argnum = n1
        n1 = nil
    elseif xcmt then
        -- the remainder may be ,"comment" (or single-quoted), optionally
        -- followed by ,NUMt
        local xcmtm, rest = xcmt:match('^,"(.+)"(.*)$')
        if not xcmtm then
            xcmtm, rest = xcmt:match("^,'(.+)'(.*)$")
        end
        if xcmtm then
            xcmt = xcmtm
        else
            rest = xcmt
            xcmt = nil
        end
        argnum = rest:match("^,(%d+)t$")
        -- the rest doesn't match either comment nor argnum nor both
        if not xcmt and not argnum then
            error("invalid keyword specifier")
        end
    end
    -- allow only one context arg
    if (n1c and n2c) or (n2c and n3c) or (n1c and n3c) then
        error("invalid keyword specifier")
    end
    -- retrieve context
    -- (the "c"-suffixed number is the msgctxt argument; the remaining
    -- numbers shift down into singular/plural positions)
    local context
    if n1c then
        context = tonumber(n1)
        n1 = n2
        n2 = n3
        n3 = nil
    elseif n2c then
        context = tonumber(n2)
        n2 = n3
        n3 = nil
    elseif n3c then
        context = tonumber(n3)
    elseif n1 and n2 and n3 then -- 3 regular args, forbidden
        error("invalid keyword specifier")
    end
    if keywords[kwb] then
        error("cannot specify the same keyword more than twice")
    end
    -- all sanitized, store :)
    -- ([1] = singular arg index (default 1), [2] = plural arg index)
    keywords[kwb] = { context = context, argnum = tonumber(argnum),
        xcomment = xcmt, tonumber(n1) or 1, tonumber(n2) }
    keywords[i] = nil
end
-- at least one default path
if #search_dirs == 0 then
    search_dirs[1] = "."
end
-- Render a parsed option descriptor back into its command-line form:
-- "-X" / "-Xvalue" for short options, "--long" / "--long=value" for
-- long options.
local build_opt = function(opt)
    local pieces
    if opt.short then
        pieces = { "-", opt.short }
        if opt.val then
            pieces[#pieces + 1] = opt.val
        end
    else
        pieces = { "--", opt.long }
        if opt.val then
            pieces[#pieces + 1] = "="
            pieces[#pieces + 1] = opt.val
        end
    end
    return table.concat(pieces)
end
-- -L/--language decides whether files are forced to the Lua path,
-- forced away from it, or (default) dispatched by .lua extension.
local onlylua = opts["L"] and opts["L"]:lower() == "lua"
local neverlua = opts["L"] and opts["L"]:lower() ~= "lua"
-- locate the real xgettext binary: -X/--xgettext, then $XGETTEXT
local hasxgettext = opts["X"]
if not hasxgettext then
    hasxgettext = os.getenv("XGETTEXT")
end
if not hasxgettext then
    return true
end
-- existence check: simply try opening the path for reading
-- NOTE(review): this does not check the file is executable -- confirm
local f = io.open(hasxgettext)
if not f then
    return true
end
f:close()
-- gather input files: one per line from every -f/--files-from list
-- file, then positional arguments
local input_files = {}
for i, v in ipairs(input_sources) do
    local f = io.open(v)
    if f then
        for line in f:lines() do
            input_files[#input_files + 1] = line
        end
    end
end
for i, v in ipairs(args) do
    input_files[#input_files + 1] = v
end
-- Build the fixed argument list for per-file invocations of the real
-- xgettext on non-Lua inputs; the trailing slot (initially false) is
-- overwritten with each file name in the main loop below.
local args_nolua = {}
for i, opt in ipairs(opts_nolua) do
    args_nolua[#args_nolua + 1] = build_opt(opt)
end
args_nolua[#args_nolua + 1] = "--omit-header"
args_nolua[#args_nolua + 1] = "--output=-"
args_nolua[#args_nolua + 1] = false
local found_files = {}
-- make sure all files exist first
-- (resolved paths are cached; "-" means stdin and is not resolved)
for i, fname in ipairs(input_files) do
    if fname ~= "-" and not excluded_files[fname] then
        local ff = util.find_file(fname, search_dirs)
        if not ff then
            error(fname .. ": no such file or directory")
        end
        found_files[fname] = ff
    end
end
-- mapping to real flags
-- (only these --flag names are honored for the Lua extractor)
local allowed_lua_flags = {
    ["lua-format" ] = true,
    ["pass-lua-format"] = true,
    ["no-lua-format" ] = true
}
-- Main extraction loop: Lua files go through the bundled generator,
-- everything else is piped through the real xgettext; each produces a
-- PO fragment string. The fragments are finally fed back to xgettext
-- as PO input to produce the merged output.
local parsed_files = {}
for i, fname in ipairs(input_files) do
    if not excluded_files[fname] then
        if onlylua or (not neverlua and fname:lower():match("^.+%.lua$")) then
            -- parse lua files here
            local fcontents, fpath
            -- handle stdin if needed
            -- (chunkname prefixes mirror Lua: "=" custom, "@" file)
            if fname == "-" then
                fpath, fcontents = "=stdin", io.stdin:read("*all")
            else
                fpath = found_files[fname]
                local f = io.open(fpath, "r")
                fcontents = f:read("*all")
                f:close()
                fpath = "@" .. fpath
            end
            -- filter --flag specs down to the allowed Lua flags, keyed
            -- both by index and by keyword word
            local actual_flags = { valid = flags.valid }
            for i, v in ipairs(flags) do
                local word, argn, flag = v:match("([^:]+):(%d+):([%a-]+)")
                if word and allowed_lua_flags[flag] then
                    actual_flags[#actual_flags + 1] = { word,
                        tonumber(argn), flag }
                    local ft = actual_flags[word]
                    if not ft then
                        ft = {}
                        actual_flags[word] = ft
                    end
                    ft[#ft + 1] = { tonumber(argn), flag }
                end
            end
            parsed_files[#parsed_files + 1] = generator.init(fpath, fcontents,
                keywords, actual_flags, add_loc, opts)
        else
            -- non-Lua input: delegate to the real xgettext binary,
            -- reusing args_nolua with the file name in the last slot
            args_nolua[#args_nolua] = fname
            local f = assert(cutil.popenv(hasxgettext, "r",
                unpack(args_nolua)))
            local s = f:read("*all")
            parsed_files[#parsed_files + 1] = s
            f:close()
        end
    end
end
-- final pass: pipe the concatenated PO fragments into xgettext as PO
-- input, applying all output-formatting options
local args_final = {}
for i, opt in ipairs(opts_final) do
    args_final[#args_final + 1] = build_opt(opt)
end
args_final[#args_final + 1] = "--language=PO"
args_final[#args_final + 1] = "-"
local f = assert(cutil.popenv(hasxgettext, "w", unpack(args_final)))
f:write(table.concat(parsed_files, "\n\n"))
f:close()
return true

View File

@ -1,128 +0,0 @@
-- Elua xgettext: generator
local lexer = require("xgettext.lexer")
local parser = require("xgettext.parser")
local tconc = table.concat
-- Format a source comment as PO extracted-comment lines ("#. ..."),
-- one per input line, with leading whitespace stripped.
local gen_comment = function(cmt)
    local out = {}
    for line in cmt:gmatch("([^\n]+)") do
        out[#out + 1] = "#. " .. line:match("^%s*(.+)$")
    end
    return table.concat(out, "\n")
end
-- Quote a message string for PO output. Single-line strings become a
-- plain quoted string; multi-line strings use the conventional form of
-- an empty first line followed by one quoted line (with literal \n)
-- per input line.
local gen_message = function(str)
    if not str:find("\n") then
        return '"' .. str .. '"'
    end
    local ret = { '""' }
    -- BUG FIX: the original iterated over the undefined name `cmt`
    -- instead of `str`, crashing on any multi-line message
    for line in str:gmatch("([^\n]+)") do
        ret[#ret + 1] = '"' .. line .. '\\n"'
    end
    return table.concat(ret, "\n")
end
-- Build the msgstr value: empty unless --msgstr-prefix/--msgstr-suffix
-- were given, in which case the decorated msgid is quoted and emitted.
local gen_msgstr = function(str, prefix, suffix)
    if not prefix and not suffix then
        return '""'
    end
    local parts
    if prefix then
        parts = { prefix, str, suffix }
    else
        parts = { str, suffix }
    end
    return gen_message(tconc(parts))
end
-- Two messages are duplicates (and merge into one PO entry) iff both
-- the msgid and the msgctxt agree.
local cmp_msgs = function(m1, m2)
    if m1[1] ~= m2[1] then
        return false
    end
    return m1.context == m2.context
end
-- Copy a parsed message into the grouped representation: comment and
-- line become single-element lists so later duplicates can append, and
-- flags are shallow-copied.
local new_msg = function(msg)
    local copied = {}
    for idx = 1, #msg.flags do
        copied[idx] = msg.flags[idx]
    end
    local grouped = { msg[1], msg[2], msg.context }
    grouped.comments = { msg.comment }
    grouped.xcomment = msg.xcomment
    grouped.lines = { msg.line }
    grouped.flags = copied
    return grouped
end
-- Drain the parser iterator `ps` and group duplicate messages (same
-- msgid + msgctxt) into single entries, accumulating their source
-- lines, comments and flags.
-- NOTE(review): assumes ps yields at least one message -- new_msg(nil)
-- would fail on empty input; confirm against the caller
local gen_grouped_messages = function(ps)
    local msg = ps()
    local ret = { new_msg(msg) }
    msg = ps()
    while msg do
        local found = false
        for i, amsg in ipairs(ret) do
            if cmp_msgs(msg, amsg) then
                -- a later duplicate may contribute the plural form
                if not amsg[2] then
                    amsg[2] = msg[2]
                end
                amsg.lines [#amsg.lines + 1] = msg.line
                amsg.comments[#amsg.comments + 1] = msg.comment
                for i, v in ipairs(msg.flags) do
                    amsg.flags[#amsg.flags + 1] = v
                end
                found = true
                break
            end
        end
        if not found then
            ret[#ret + 1] = new_msg(msg)
        end
        msg = ps()
    end
    for i, msg in ipairs(ret) do
        -- table.uniq is not standard Lua -- presumably installed by the
        -- util module; TODO confirm
        msg.flags = table.uniq(msg.flags)
    end
    return ret
end
-- Produce the "#: file:line file:line ..." PO location comment for a
-- message, wrapping onto a new "#:" line when a line would exceed 80
-- columns.
local gen_line_info = function(chunkname, lines)
    local cname = lexer.source_to_msg(chunkname)
    local linestrs = {}
    local linestr = "#:"
    local i = 1
    while i <= #lines do
        local tmps = linestr .. tconc { " ", cname, ":", lines[i] }
        -- BUG FIX: only wrap when the current line already holds at
        -- least one reference; the original looped forever when a
        -- single "file:line" reference alone exceeded 80 columns
        if #tmps > 80 and linestr ~= "#:" then
            linestrs[#linestrs + 1] = linestr
            linestr = "#:"
        else
            linestr = tmps
            i = i + 1
        end
    end
    linestrs[#linestrs + 1] = linestr
    return tconc(linestrs, "\n")
end
-- Module entry point: run the parser over `input` (source text of
-- chunk `chunkname`) and render every grouped message as a complete PO
-- entry; returns all entries joined by blank lines.
return { init = function(chunkname, input, keywords, flags, add_loc, opts)
    local rets = {}
    for i, msg in ipairs(gen_grouped_messages(parser.init(chunkname,
    input, keywords, flags, opts))) do
        local ret = {}
        -- xcomment comes from the keyword spec, comments from source
        if msg.xcomment then
            ret[#ret + 1] = gen_comment(msg.xcomment)
        end
        for i, cmt in ipairs(msg.comments) do
            ret[#ret + 1] = gen_comment(cmt)
        end
        if msg.context then
            ret[#ret + 1] = "msgctxt " .. gen_message(msg.context)
        end
        -- location comments are suppressed by --no-location
        if add_loc then
            ret[#ret + 1] = gen_line_info(chunkname, msg.lines)
        end
        ret[#ret + 1] = "msgid " .. gen_message(msg[1])
        -- -m/-M: msgstr prefix/suffix options
        local spf, ssf = opts["m"], opts["M"]
        if msg[2] then
            -- plural entry: two msgstr slots
            ret[#ret + 1] = "msgid_plural " .. gen_message(msg[2])
            ret[#ret + 1] = "msgstr[0] " .. gen_msgstr(msg[1], spf, ssf)
            ret[#ret + 1] = "msgstr[1] " .. gen_msgstr(msg[2], spf, ssf)
        else
            ret[#ret + 1] = "msgstr " .. gen_msgstr(msg[1], spf, ssf)
        end
        rets[#rets + 1] = tconc(ret, "\n")
    end
    return tconc(rets, "\n\n")
end }

View File

@ -1,353 +0,0 @@
-- Elua xgettext: lexer
local yield = coroutine.yield
local tconc = table.concat
local keywords = {
["and" ] = true, ["break" ] = true, ["do" ] = true, ["else"] = true,
["elseif" ] = true, ["end" ] = true, ["false"] = true, ["for" ] = true,
["function"] = true, ["goto" ] = true, ["if" ] = true, ["in" ] = true,
["local" ] = true, ["nil" ] = true, ["not" ] = true, ["or" ] = true,
["repeat" ] = true, ["return"] = true, ["then" ] = true, ["true"] = true,
["until" ] = true, ["while" ] = true
}
local tokens = {
"..", "...", "==", ">=", "<=", "~=", "::",
"<name>", "<string>", "<number>", "<eof>"
}
local max_custom_len = 79
local max_fname_len = 72
local max_str_len = 63
-- Convert a Lua chunkname into a human-readable source description,
-- mirroring the Lua core's conventions: "@path" is a file (tail kept
-- if too long), "=text" is a custom description, anything else is the
-- chunk's own text shown as [string "..."].
local source_to_msg = function(source)
    local first = source:sub(1, 1)
    local len = #source
    if first == "@" then
        if len > max_fname_len + 1 then
            return "..." .. source:sub(len - max_fname_len + 1)
        end
        return source:sub(2)
    end
    if first == "=" then
        return source:sub(2, max_custom_len + 1)
    end
    local tail = (len > max_str_len) and '..."]' or '"]'
    return '[string "' .. source:sub(1, max_str_len) .. tail
end
-- Raise a lexer error tagged with source and line; level 0 keeps the
-- already-formatted position as the entire error message.
local lex_error = function(ls, msg, tok)
    local full = ("%s:%d: %s"):format(
        source_to_msg(ls.source), ls.line_number, msg)
    if tok then
        full = full .. " near '" .. tok .. "'"
    end
    error(full, 0)
end
-- Report an error at the current token (its value when present,
-- otherwise its name).
local syntax_error = function(ls, msg)
    local tok = ls.token
    lex_error(ls, msg, tok.value or tok.name)
end
-- Advance the input by one character, caching it in ls.current; the
-- reader returns nil at end of input.
local next_char = function(ls)
    ls.current = ls.reader()
    return ls.current
end
-- Consume a line break (LF, CR, CRLF or LFCR), bump the line counter
-- and return the first character of the new line. The `cs` parameter
-- is unused but kept for interface compatibility.
local next_line = function(ls, cs)
    local first = ls.current
    assert(first == "\n" or first == "\r")
    local c = next_char(ls)
    -- a different newline char immediately after forms a two-char break
    if (c == "\n" or c == "\r") and c ~= first then
        c = next_char(ls)
    end
    ls.line_number = ls.line_number + 1
    return c
end
-- Read a numeric literal starting at ls.current; `beg` optionally
-- supplies an already-consumed first character (e.g. "." for ".5").
-- Validity is checked by handing the collected text to tonumber.
local read_number = function(ls, beg)
    local buf = {}
    if beg then buf[1] = beg end
    local c = ls.current
    -- integer/fraction digits
    while c == "." or c:match("%d") do
        buf[#buf + 1] = c
        c = next_char(ls)
    end
    -- optional exponent with optional sign
    if c == "e" or c == "E" then
        buf[#buf + 1] = c
        c = next_char(ls)
        if c == "+" or c == "-" then
            buf[#buf + 1] = c
            c = next_char(ls)
        end
    end
    -- trailing alphanumerics (e.g. hex digits, or junk caught below)
    while c:match("%w") do
        buf[#buf + 1] = c
        c = next_char(ls)
    end
    local str = tconc(buf)
    if not tonumber(str) then
        lex_error(ls, "malformed number", str)
    end
    return str
end
-- Count the '=' signs of a long-bracket delimiter. Returns the level
-- (>= 0) when the bracket closes with the same character that opened
-- it, and a negative value otherwise; every consumed character is
-- appended to `buf` so callers can report it.
local skip_sep = function(ls, buf)
    local open = ls.current
    assert(open == "[" or open == "]")
    buf[#buf + 1] = open
    local level = 0
    local c = next_char(ls)
    while c == "=" do
        buf[#buf + 1] = c
        level = level + 1
        c = next_char(ls)
    end
    if c == open then
        return level
    end
    return -level - 1
end
-- Read the body of a long string/comment with bracket level `sep`;
-- `cmt` marks that we are inside a long comment (affects the EOF
-- error message only). An immediate leading newline is skipped, per
-- Lua long-string semantics.
local read_long_string = function(ls, sep, cmt)
    local buf = {}
    local c = next_char(ls)
    if c == "\n" or c == "\r" then c = next_line(ls) end
    while true do
        if not c then
            -- BUG FIX: the original tested the undefined name `tok`
            -- here, so "unfinished long comment" was unreachable
            lex_error(ls, cmt and "unfinished long comment"
                or "unfinished long string", "<eof>")
        elseif c == "]" then
            local tbuf = {}
            if skip_sep(ls, tbuf) == sep then
                -- matching closing bracket: done
                c = next_char(ls)
                break
            else
                -- false alarm, keep the consumed text
                buf[#buf + 1] = tconc(tbuf)
            end
            c = ls.current
        else
            -- NOTE(review): newlines inside the body do not bump
            -- ls.line_number here, so line info after a multi-line
            -- long string may be off -- confirm before relying on it
            buf[#buf + 1] = c
            c = next_char(ls)
        end
    end
    return tconc(buf)
end
-- Read a short (quote-delimited) string literal; ls.current is the
-- opening quote. Only the \n escape is decoded -- every other escape
-- sequence is kept verbatim (backslash included), presumably so the
-- PO output preserves the original escapes; TODO confirm.
local read_string = function(ls)
    local delim = ls.current
    local buf = {}
    local c = next_char(ls)
    while c ~= delim do
        if not c then lex_error(ls, "unfinished string", "<eof>")
        elseif c == "\n" or c == "\r" then
            -- literal newlines are not allowed in short strings
            lex_error(ls, "unfinished string", tconc(buf))
        elseif c == "\\" then
            c = next_char(ls)
            if c == "n" then
                buf[#buf + 1] = "\n"
            else
                buf[#buf + 1] = "\\" .. c
            end
            c = next_char(ls)
        else
            buf[#buf + 1] = c
            c = next_char(ls)
        end
    end
    -- consume the closing quote
    next_char(ls)
    return tconc(buf)
end
-- Tracks whether the previous token was a kept comment, so comment
-- blocks continue past their first TAG-matching line.
local last_comment = false
-- Classify a comment's text: a flag comment (its trimmed text matches
-- one of the --flag definitions), an extracted comment (enabled by
-- --add-comments, optionally filtered by a TAG prefix), or nil.
local match_comment = function(ls, cmt)
    -- BUG FIX: the original pattern "^%s*(.+)%s*$" never stripped
    -- trailing whitespace (the greedy .+ swallows it) and returned nil
    -- for whitespace-only comments; the lazy capture fixes both
    cmt = cmt:match("^%s*(.-)%s*$")
    if ls.flags[cmt] then
        return "<flagcomment>", cmt
    end
    local lcmt = ls.lex_cmt
    if not lcmt then return nil end
    if type(lcmt) ~= "string" then
        -- --add-comments without a TAG: keep every comment
        return "<comment>", cmt
    end
    lcmt = lcmt:match("^%s*(.-)%s*$")
    -- keep comments that start with the TAG, or continue a kept block
    if last_comment or cmt:sub(1, #lcmt) == lcmt then
        return "<comment>", cmt
    end
    return nil
end
-- Dispatch table from the current character to its lexing handler.
-- A handler returns (token_name [, value]) or nothing for skipped
-- input (whitespace, discarded comments).
local lex_tbl = {
    ["\n"] = function(ls) next_line(ls) end,
    [" " ] = function(ls) next_char(ls) end,
    ["-" ] = function(ls)
        -- minus operator, long comment, or line comment
        local c = next_char(ls)
        if c ~= "-" then
            return "-"
        end
        c = next_char(ls)
        if c == "[" then
            local sep = skip_sep(ls, {})
            if sep >= 0 then
                return match_comment(ls, read_long_string(ls, sep, true))
            end
        end
        -- line comment: collect to end of line
        local buf = {}
        while ls.current and ls.current ~= "\n" and ls.current ~= "\r" do
            buf[#buf + 1] = ls.current
            next_char(ls)
        end
        return match_comment(ls, tconc(buf))
    end,
    ["[" ] = function(ls)
        -- long string, plain '[', or a malformed delimiter like "[=x"
        local buf = {}
        -- BUG FIX: the original passed a throwaway table to skip_sep,
        -- so the error message below always showed an empty delimiter
        local sep = skip_sep(ls, buf)
        if sep >= 0 then
            return "<string>", read_long_string(ls, sep)
        elseif sep == -1 then
            return "["
        else
            lex_error(ls, "invalid long string delimiter", tconc(buf))
        end
    end,
    ["="] = function(ls)
        -- shared handler for '=', '<', '>', '~' and their '='-suffixed
        -- two-character forms
        local oc = ls.current
        local c = next_char(ls)
        -- BUG FIX: the original returned the *following* character and
        -- built the token from it ('<=' lexed as '==', '<x' as 'x');
        -- the token must be built from the operator character `oc`
        if c ~= "=" then return oc
        else next_char(ls); return oc .. "=" end
    end,
    ['"' ] = function(ls)
        return "<string>", read_string(ls)
    end,
    ["." ] = function(ls)
        -- '.', '..', '...', or a number like '.5'
        local c = next_char(ls)
        if c == "." then
            c = next_char(ls)
            if c == "." then
                next_char(ls)
                return "..."
            else
                return ".."
            end
        elseif c:match("%d") then
            return "<number>", read_number(ls, ".")
        else
            return "."
        end
    end,
    ["0" ] = function(ls)
        return "<number>", read_number(ls)
    end
}
-- aliases: characters sharing a handler with the key defined above
lex_tbl["\r"] = lex_tbl["\n"]
lex_tbl["\f"] = lex_tbl[" " ]
lex_tbl["\t"] = lex_tbl[" " ]
lex_tbl["\v"] = lex_tbl[" " ]
lex_tbl["<" ] = lex_tbl["=" ]
lex_tbl[">" ] = lex_tbl["=" ]
lex_tbl["~" ] = lex_tbl["=" ]
lex_tbl["'" ] = lex_tbl['"' ]
lex_tbl["1" ] = lex_tbl["0" ]
lex_tbl["2" ] = lex_tbl["0" ]
lex_tbl["3" ] = lex_tbl["0" ]
lex_tbl["4" ] = lex_tbl["0" ]
lex_tbl["5" ] = lex_tbl["0" ]
lex_tbl["6" ] = lex_tbl["0" ]
lex_tbl["7" ] = lex_tbl["0" ]
lex_tbl["8" ] = lex_tbl["0" ]
lex_tbl["9" ] = lex_tbl["0" ]
-- Fallback handler: identifiers and reserved words, or any other
-- single character returned as its own token.
local lex_default = function(ls)
    local c = ls.current
    if c ~= "_" and not c:match("%a") then
        next_char(ls)
        return c
    end
    -- collect an identifier: leading letter/underscore, then word chars
    local buf = {}
    repeat
        buf[#buf + 1] = c
        c = next_char(ls)
    until not c or not (c == "_" or c:match("%w"))
    local word = tconc(buf)
    if keywords[word] then
        -- reserved words are their own token name
        return word
    end
    return "<name>", word
end
-- Coroutine body of the lexer: yields (token_name, value) pairs
-- forever, ending with "<eof>". The initial bare yield() pauses until
-- the first ls:get() after construction. Also maintains the shared
-- last_comment flag used by match_comment to continue comment blocks.
local lex_main = function(ls)
    yield()
    while true do
        local c = ls.current
        if c == nil then
            return "<eof>"
        end
        local opt = lex_tbl[c]
        if opt then
            local t, v = opt(ls)
            -- handlers return nothing for skipped input (whitespace,
            -- discarded comments); only yield real tokens
            if t then
                last_comment = t == "<comment>"
                yield(t, v)
            end
        else
            last_comment = false
            yield(lex_default(ls))
        end
    end
end
-- Build a character-at-a-time reader over a string, mirroring the
-- interface of a file-based reader: each call returns the next single
-- character, then nil at the end.
local strstream = function(str)
    local pos = 0
    return function()
        pos = pos + 1
        if pos > #str then
            return nil
        end
        return str:sub(pos, pos)
    end
end
-- Skip a UTF-8 byte order mark (EF BB BF) at the start of the input
-- and return the first character after it.
-- BUG FIX: the reader yields one-character *strings* (see strstream
-- and next_char), so the original comparisons against the numbers
-- 0xEF/0xBB/0xBF could never match and the BOM was never skipped.
local skip_bom = function(rdr)
    local c = rdr()
    if c ~= "\239" then return c end
    c = rdr()
    if c ~= "\187" then return c end
    c = rdr()
    if c ~= "\191" then return c end
    return rdr()
end
-- Skip an optional BOM and an optional "#!..." shebang line; returns
-- the first character of real content.
-- BUG FIX: the original called the undefined function `is_newline` and
-- compared the reader's one-character strings against byte numbers
-- (35, 10, 13), so it crashed or misbehaved on any shebang input.
local skip_shebang = function(rdr)
    local c = skip_bom(rdr)
    if c == "#" then
        -- consume to end of line
        repeat
            c = rdr()
        until not c or c == "\n" or c == "\r"
        local e = c
        c = rdr()
        -- swallow the second half of a CRLF/LFCR pair
        if (e == "\n" and c == "\r") or (e == "\r" and c == "\n") then
            c = rdr()
        end
    end
    return c
end
-- Pull the next (name, value) pair out of the lexer coroutine into
-- self.token and return that token table.
local ls_get = function(self)
    local tok = self.token
    local name, value = self.coro()
    tok.name = name
    tok.value = value
    return tok
end
-- Module entry point: build a lexer state over `input` (a string or a
-- character reader function). opts["c"] (--add-comments) selects which
-- comments are kept; flags.valid is the set of flag-comment texts.
return { init = function(chunkname, input, flags, opts)
    local reader = type(input) == "string" and strstream(input) or input
    local current = skip_shebang(reader)
    local ls = {
        reader = reader,
        token = {},
        source = chunkname,
        current = current,
        line_number = 1,
        get = ls_get,
        lex_cmt = opts["c"],
        flags = flags.valid
    }
    -- NOTE(review): coroutine.wrap takes only a function; the extra
    -- `ls` argument here is ignored -- ls is actually passed via the
    -- first resume (coro(ls)) below, which runs lex_main up to its
    -- initial yield()
    local coro = coroutine.wrap(lex_main, ls)
    ls.coro = coro
    coro(ls)
    return ls
end, syntax_error = syntax_error, source_to_msg = source_to_msg }

View File

@ -1,267 +0,0 @@
-- Elua xgettext: parser
local util = require("util")
local lexer = require("xgettext.lexer")
local syntax_error = lexer.syntax_error
local yield = coroutine.yield
local tconc = table.concat
-- One extractable message: msgid (singular), optional plural, optional
-- msgctxt, plus attached comments, flags and the source line. Objects
-- inside a keyword call get a `parent` Call that contributes flags.
local String = util.Object:clone {
    __ctor = function(self, sing, plu, ctx, cmt, xcmt, flags, line)
        self.singular = sing
        self.plural = plu
        self.context = ctx
        self.comment = cmt
        self.xcomment = xcmt
        self.flags = flags
        self.line = line
    end,
    -- hook for heuristic flag detection; intentionally a no-op here
    guess_flags = function(self, flags)
    end,
    gen_flags = function(self)
        -- own flags + flags inherited via the enclosing call + guesses
        local flags = {}
        for i, v in ipairs(self.flags) do flags[i] = v end
        if self.parent then
            self.parent:add_flags(self, flags)
        end
        self:guess_flags(flags)
        return flags
    end,
    generate = function(self)
        -- yield the message up to the generator coroutine
        -- BUG FIX: the original wrote the `comment` key twice, losing
        -- the keyword's extracted comment; the first must be `xcomment`
        -- (the generator reads both msg.xcomment and msg.comment)
        yield {
            self.singular, self.plural, context = self.context,
            xcomment = self.xcomment, comment = self.comment, line = self.line,
            flags = self:gen_flags()
        }
    end
}
-- A keyword call wrapping its argument messages; propagates --flag
-- specifications (by argument position) down to the contained Strings.
local Call = util.Object:clone {
    __ctor = function(self, flags, args)
        self.flags = flags
        self.args = args
        -- back-link so each argument can inherit flags from this call
        for i, v in ipairs(args) do
            v.parent = self
        end
    end,
    -- Append to `flags` every flag registered for the position of
    -- argument object `argo`; "pass-*" flags are forwarded upward to
    -- the enclosing call instead (optionally filtered by `flagstr`).
    add_flags = function(self, argo, flags, flagstr)
        local argn
        for i, a in ipairs(self.args) do
            if a == argo then
                argn = i
                break
            end
        end
        for i, flag in ipairs(self.flags) do
            if flag[1] == argn then
                local pass = flag[2]:match("^pass%-(.+)$")
                if not flagstr or flagstr == pass or flagstr == flag[2] then
                    if pass then
                        -- NOTE(review): assumes self.parent is set for
                        -- pass-* flags -- TODO confirm nesting always
                        -- provides one
                        self.parent:add_flags(self, flags, pass)
                    else
                        flags[#flags + 1] = flag[2]
                    end
                end
            end
        end
    end,
    generate = function(self)
        -- emit every argument message in order
        for i, v in ipairs(self.args) do
            v:generate()
        end
    end
}
local saved_flagcomments = {}
local saved_comments = {}
-- Verify the current token is the closing token `a` for construct `b`
-- opened at `line`; on mismatch, report where the opening was when it
-- sits on a different line. (syntax_error raises, so a match simply
-- falls through.)
local check_match = function(ls, a, b, line)
    if ls.token.name == a then
        return
    end
    if line == ls.line_number then
        syntax_error(ls, "'" .. a .. "' expected")
    end
    syntax_error(ls, "'" .. a .. "' expected (to close '" .. b
        .. "' at line " .. line .. ")")
end
-- BUG FIX: parse_expr must be forward-declared *before*
-- parse_simple_expr; the original declared `local parse_expr` after
-- it, so the recursive call inside parse_simple_expr resolved to a
-- nil global and crashed on any parenthesized expression.
local parse_expr

-- Parse a primary expression: a parenthesized expression or a single
-- string/number/name token. Returns (value, token_type).
local parse_simple_expr = function(ls)
    local tok = ls.token
    local tn = tok.name
    if tn == "(" then
        local line = ls.line_number
        ls:get()
        local v, vtn = parse_expr(ls)
        check_match(ls, ")", "(", line)
        ls:get()
        return v, vtn
    elseif tn == "<string>" or tn == "<number>" or tn == "<name>" then
        local v = tok.value
        ls:get()
        return v, tn
    else
        syntax_error(ls, "unexpected symbol")
    end
end

-- Parse a constant expression, folding ".." concatenations of
-- string/number operands into a single string value at parse time.
parse_expr = function(ls)
    local tok = ls.token
    local lhs, tn = parse_simple_expr(ls)
    while true do
        if tok.name ~= ".." then break end
        if tn ~= "<string>" and tn ~= "<number>" then
            syntax_error(ls, "invalid lhs for concat")
        end
        tn = "<string>"
        ls:get()
        local rhs, rtn = parse_expr(ls)
        if rtn ~= "<string>" and rtn ~= "<number>" then
            syntax_error(ls, "invalid rhs for concat")
        end
        lhs = lhs .. rhs
    end
    return lhs, tn
end
-- Parse a comma-separated expression list; each element is stored as
-- a { value, token_type } pair.
local parse_arglist = function(ls)
    local tok = ls.token
    local list = {}
    repeat
        list[#list + 1] = { parse_expr(ls) }
        local more = (tok.name == ",")
        if more then
            ls:get()
        end
    until not more
    return list
end
-- Parse the argument part of a keyword call: a parenthesized argument
-- list (possibly empty), a bare string literal (Lua's f"..." call
-- sugar), or nil when no call follows the keyword.
local parse_kwcall = function(ls)
    local tok = ls.token
    if tok.name == "(" then
        local line = ls.line_number
        ls:get()
        if tok.name == ")" then
            ls:get()
            return {}
        end
        local args = parse_arglist(ls)
        check_match(ls, ")", "(", line)
        ls:get()
        return args
    end
    if tok.name == "<string>" then
        local v = tok.value
        ls:get()
        return { { v, "<string>" } }
    end
    return nil
end
-- Parse a localization keyword call at the current token. Returns a
-- String message object on success, or false when the call does not
-- match the keyword's argument specification (arg counts, required
-- string-literal arguments, exact total argument count).
local parse_kw = function(ls, keywords)
    local tok = ls.token
    local line = ls.line_number
    local kw = keywords[tok.value]
    ls:get()
    local args = parse_kwcall(ls)
    -- BUG FIX: the nil check must happen before args is indexed; the
    -- original indexed `args` first and crashed on a keyword that was
    -- not followed by a call
    if not args then return false end
    -- kw: [1] singular arg idx, [2] plural arg idx, context arg idx,
    -- required total argnum, xcomment text
    local n1, n2, cx, an = kw[1], kw[2], kw.context, kw.argnum
    local n1arg, n2arg, cxarg = args[n1], args[n2], args[cx]
    -- an argument qualifies only if it is a compile-time constant
    -- (anything but a bare <name>)
    local n1argt = n1arg and (n1arg[2] ~= "<name>")
    local n2argt = n2arg and (n2arg[2] ~= "<name>")
    local cxargt = cxarg and (cxarg[2] ~= "<name>")
    if an and #args ~= an then return false end
    if #args < n1 then return false end
    if n2 and #args < n2 then return false end
    if cx and #args < cx then return false end
    if not n1argt then return false end
    if n2 and not n2argt then return false end
    if cx and not cxargt then return false end
    -- claim the comments collected since the last message
    local sc = saved_comments
    saved_comments = {}
    sc = tconc(sc, "\n")
    local fsc = saved_flagcomments
    saved_flagcomments = {}
    return String(n1arg[1], n2 and n2arg[1] or nil, cx and cxarg[1] or nil,
        sc, kw.xcomment, fsc, line)
end
-- Parse a call to a --flag keyword (would build a Call object wrapping
-- its argument messages). Not yet implemented; callers invoke it under
-- pcall, so the error makes the flagged call be skipped.
local parse_fg = function(ls, flags, keywords)
    error("NYI")
end
-- Main parse loop (coroutine body): accumulate comments/flag comments
-- and, on each known keyword or flag name, extract the message(s) and
-- yield them through String:generate(). The initial yield() pauses
-- until the first pull from the returned iterator.
local parse = function(ls, keywords, flags)
    yield()
    local tok = ls.token
    while tok.name ~= "<eof>" do
        if tok.name == "<comment>" then
            saved_comments[#saved_comments + 1] = tok.value
            ls:get()
        elseif tok.name == "<flagcomment>" then
            saved_flagcomments[#saved_flagcomments + 1] = tok.value
            ls:get()
        elseif tok.name == "<name>" then
            if keywords[tok.value] then
                -- BUG FIX: the original called pcall(parse_kw, keywords),
                -- omitting `ls` -- parse_kw received the keywords table
                -- as the lexer state and always errored out
                local status, str = pcall(parse_kw, ls, keywords)
                if status and str then
                    str:generate()
                end
            elseif flags[tok.value] then
                -- BUG FIX: same missing `ls` argument as above
                local status, call = pcall(parse_fg, ls, flags, keywords)
                if status then
                    call:generate()
                end
            else
                ls:get()
            end
        else
            ls:get()
        end
    end
end
-- --extract-all variant (coroutine body): every string literal becomes
-- a message, with the comments collected since the previous one.
local parse_all = function(ls)
    yield()
    local tok = ls.token
    while tok.name ~= "<eof>" do
        if tok.name == "<comment>" then
            saved_comments[#saved_comments + 1] = tok.value
            ls:get()
        elseif tok.name == "<flagcomment>" then
            saved_flagcomments[#saved_flagcomments + 1] = tok.value
            ls:get()
        elseif tok.name == "<string>" then
            local line = ls.line_number
            local val = tok.value
            -- claim pending comments for this message
            local sc = saved_comments
            saved_comments = {}
            sc = tconc(sc, "\n")
            local fsc = saved_flagcomments
            saved_flagcomments = {}
            ls:get()
            String(val, nil, nil, sc, nil, fsc, line):generate()
        else
            ls:get()
        end
    end
end
-- Module entry point: build a lexer over `input` and return an
-- iterator coroutine yielding extracted messages (every string literal
-- when opts["a"]/--extract-all is set).
return { init = function (chunkname, input, keywords, flags, opts)
    local ls = lexer.init(chunkname, input, flags, opts)
    ls:get()
    local coro = coroutine.wrap(opts["a"] and parse_all or parse)
    -- the first resume passes the parser its arguments and runs it up
    -- to the initial yield()
    -- BUG FIX: the original resumed with coro(ls, keywords) only, so
    -- parse() received flags = nil and crashed indexing it on the
    -- first name token that was not a keyword
    coro(ls, keywords, flags)
    return coro
end }