forked from enlightenment/efl
elua: prepare xgettext for flag comments and try to emulate xgettext's silly behavior when it comes to concatenating comments
This commit is contained in: parent d8caf323d8 · commit 9d5b2d433e
@ -14,7 +14,7 @@ local input_sources = {}
|
|||
local search_dirs = {}
|
||||
local excluded_files = {}
|
||||
local keywords = {}
|
||||
local flags = {}
|
||||
local flags = { valid = {} }
|
||||
|
||||
local opts_final = {}
|
||||
local opts_nolua = {}
|
||||
|
|
|
@ -144,14 +144,20 @@ local read_string = function(ls)
|
|||
return tconc(buf)
|
||||
end
|
||||
|
||||
local last_comment = false
|
||||
|
||||
local match_comment = function(ls, cmt)
|
||||
cmt = cmt:match("^%s*(.+)%s*$")
|
||||
if ls.flags[cmt] then
|
||||
return "<flagcomment>", cmt
|
||||
end
|
||||
local lcmt = ls.lex_cmt
|
||||
if not lcmt then return nil end
|
||||
if type(lcmt) ~= "string" then
|
||||
return "<comment>", cmt
|
||||
end
|
||||
lcmt = lcmt:match("^%s*(.+)$")
|
||||
if cmt:match("^%s*(.+)$"):sub(1, #lcmt) == lcmt then
|
||||
lcmt = lcmt:match("^%s*(.+)%s*$")
|
||||
if last_comment or cmt:sub(1, #lcmt) == lcmt then
|
||||
return "<comment>", cmt
|
||||
end
|
||||
return nil
|
||||
|
@ -249,8 +255,12 @@ local lex_main = function(ls)
|
|||
local opt = lex_tbl[c]
|
||||
if opt then
|
||||
local t, v = opt(ls)
|
||||
if t then yield(t, v) end
|
||||
if t then
|
||||
last_comment = t == "<comment>"
|
||||
yield(t, v)
|
||||
end
|
||||
else
|
||||
last_comment = false
|
||||
yield(lex_default(ls))
|
||||
end
|
||||
end
|
||||
|
@ -291,7 +301,7 @@ local ls_get = function(self)
|
|||
return tok
|
||||
end
|
||||
|
||||
return { init = function(chunkname, input, opts)
|
||||
return { init = function(chunkname, input, flags, opts)
|
||||
local reader = type(input) == "string" and strstream(input) or input
|
||||
local current = skip_shebang(reader)
|
||||
local ls = {
|
||||
|
@ -301,7 +311,8 @@ return { init = function(chunkname, input, opts)
|
|||
current = current,
|
||||
line_number = 1,
|
||||
get = ls_get,
|
||||
lex_cmt = opts["c"]
|
||||
lex_cmt = opts["c"],
|
||||
flags = flags.valid
|
||||
}
|
||||
local coro = coroutine.wrap(lex_main, ls)
|
||||
ls.coro = coro
|
||||
|
|
|
@ -5,8 +5,10 @@ local lexer = require("xgettext.lexer")
|
|||
local syntax_error = lexer.syntax_error
|
||||
|
||||
local yield = coroutine.yield
|
||||
local tconc = table.concat
|
||||
|
||||
local saved_comment
|
||||
local saved_flagcomments = {}
|
||||
local saved_comments = {}
|
||||
|
||||
local check_match = function(ls, a, b, line)
|
||||
if ls.token.name ~= a then
|
||||
|
@ -100,7 +102,10 @@ local parse = function(ls, keywords)
|
|||
local tok = ls.token
|
||||
while tok.name ~= "<eof>" do
|
||||
if tok.name == "<comment>" then
|
||||
saved_comment = tok.value
|
||||
saved_comments[#saved_comments + 1] = tok.value
|
||||
ls:get()
|
||||
elseif tok.name == "<flagcomment>" then
|
||||
saved_flagcomments[#saved_flagcomments + 1] = tok.value
|
||||
ls:get()
|
||||
elseif tok.name == "<name>" and keywords[tok.value] then
|
||||
local line = ls.line_number
|
||||
|
@ -120,11 +125,15 @@ local parse = function(ls, keywords)
|
|||
if not n1argt then goto skip end
|
||||
if n2 and not n2argt then goto skip end
|
||||
if cx and not cxargt then goto skip end
|
||||
local sc = saved_comment
|
||||
saved_comment = nil
|
||||
local sc = saved_comments
|
||||
saved_comments = {}
|
||||
sc = tconc(sc, "\n")
|
||||
local fsc = saved_flagcomments
|
||||
saved_flagcomments = {}
|
||||
yield {
|
||||
n1arg[1], n2 and n2arg[1], context = cx and cxarg[1],
|
||||
xcomment = kw.xcomment, comment = sc, line = line
|
||||
xcomment = kw.xcomment, comment = sc, line = line,
|
||||
flags = fsc
|
||||
}
|
||||
else
|
||||
ls:get()
|
||||
|
@ -154,7 +163,7 @@ local parse_all = function(ls)
|
|||
end
|
||||
|
||||
return { init = function (chunkname, input, keywords, flags, opts)
|
||||
local ls = lexer.init(chunkname, input, opts)
|
||||
local ls = lexer.init(chunkname, input, flags, opts)
|
||||
ls:get()
|
||||
local coro = coroutine.wrap(opts["a"] and parse_all or parse, ls, keywords)
|
||||
coro(ls, keywords)
|
||||
|
|
Loading…
Reference in New Issue