forked from enlightenment/efl
docgen: migrate to new Eolian-provided tokenizer
This commit is contained in:
parent
91e84aa3b8
commit
2f5db8c7d5
|
@ -1551,7 +1551,7 @@ M.documentation_string_split = function(str)
|
|||
local sep = str:find("\n\n", 1, true)
|
||||
local ret = {}
|
||||
while true do
|
||||
local pstr = (sep and str:sub(1, sep - 1) or pstr):match("^%s*(.-)%s*$")
|
||||
local pstr = (sep and str:sub(1, sep - 1) or str):match("^%s*(.-)%s*$")
|
||||
if #pstr > 0 then
|
||||
ret[#ret + 1] = pstr
|
||||
end
|
||||
|
|
|
@ -43,7 +43,7 @@ local gen_doc_refd = function(str)
|
|||
if not str then
|
||||
return nil
|
||||
end
|
||||
local pars = dutil.str_split(str, "\n\n")
|
||||
local pars = eolian.documentation_string_split(str)
|
||||
for i = 1, #pars do
|
||||
pars[i] = writer.Buffer():write_par(pars[i]):finish()
|
||||
end
|
||||
|
@ -1269,94 +1269,76 @@ M.Expression = Node:clone {
|
|||
end
|
||||
}
|
||||
|
||||
local decl_to_nspace = function(decl)
|
||||
local dt = eolian.declaration_type
|
||||
local decltypes = {
|
||||
[dt.ALIAS] = "alias",
|
||||
[dt.STRUCT] = "struct",
|
||||
[dt.ENUM] = "enum",
|
||||
[dt.VAR] = "var"
|
||||
-- Thin OO wrapper over the Eolian-provided documentation tokenizer.
-- Re-exports the token type constants and provides tokenize/text/type/ref
-- accessors over an underlying eolian doc token state.
M.DocTokenizer = Node:clone {
    UNKNOWN          = eolian.doc_token_type.UNKNOWN,
    TEXT             = eolian.doc_token_type.TEXT,
    REF              = eolian.doc_token_type.REF,
    MARK_NOTE        = eolian.doc_token_type.MARK_NOTE,
    MARK_WARNING     = eolian.doc_token_type.MARK_WARNING,
    MARK_REMARK      = eolian.doc_token_type.MARK_REMARK,
    MARK_TODO        = eolian.doc_token_type.MARK_TODO,
    MARKUP_MONOSPACE = eolian.doc_token_type.MARKUP_MONOSPACE,

    -- Bind a fresh tokenizer state to the given documentation string.
    __ctor = function(self, str)
        self.tok = eolian.doc_token_init()
        self.str = str
        assert(self.str)
        assert(self.tok)
    end,

    -- Advance to the next token; returns true while input remains.
    tokenize = function(self)
        self.str = eolian.documentation_tokenize(self.str, self.tok)
        return not not self.str
    end,

    -- Raw text of the current token.
    text_get = function(self)
        return self.tok:text_get()
    end,

    -- Token type constant of the current token.
    type_get = function(self)
        return self.tok:type_get()
    end,

    -- Resolve the current reference token into a namespace path: an array
    -- of lowercased name components prefixed with the declaration kind.
    -- When `root` is not nil, a boolean root flag is appended.
    -- Raises on references that cannot be resolved.
    ref_get = function(self, root)
        local rtp, data, data2 = self.tok:ref_get()
        local reft = eolian.doc_ref_type
        local ret
        if rtp == reft.CLASS or rtp == reft.FUNC or rtp == reft.EVENT then
            ret = { M.Class(data):type_str_get() }
            if not ret[1] then
                error("unknown class type for class '"
                      .. data:full_name_get() .. "'")
            end
        elseif rtp == reft.ALIAS then
            ret = { "alias" }
        elseif rtp == reft.STRUCT or rtp == reft.STRUCT_FIELD then
            -- TODO: point to field
            ret = { "struct" }
        elseif rtp == reft.ENUM or rtp == reft.ENUM_FIELD then
            -- TODO: point to field
            ret = { "enum" }
        elseif rtp == reft.VAR then
            ret = { "var" }
        else
            error("invalid reference '" .. self:text_get() .. "'")
        end
        -- split the dotted full name into lowercased path components
        for comp in data:full_name_get():gmatch("[^%.]+") do
            ret[#ret + 1] = comp:lower()
        end
        -- functions and events carry a secondary datum naming the member
        if rtp == reft.FUNC then
            local fid = M.Function(data2)
            ret[#ret + 1] = fid:type_str_get()
            ret[#ret + 1] = fid:name_get():lower()
        elseif rtp == reft.EVENT then
            ret[#ret + 1] = "event"
            ret[#ret + 1] = data2:name_get():lower()
        end
        if root ~= nil then
            ret[#ret + 1] = not not root
        end
        return ret
    end
}
|
||||
local ns = decltypes[decl:type_get()]
|
||||
if ns then
|
||||
return ns
|
||||
elseif decl:type_get() == dt.CLASS then
|
||||
local ret = M.Class(decl:class_get()):type_str_get()
|
||||
if not ret then
|
||||
error("unknown class type for class '" .. decl:name_get() .. "'")
|
||||
end
|
||||
return ret
|
||||
else
|
||||
error("unknown declaration type for declaration '"
|
||||
.. decl:name_get() .. "'")
|
||||
end
|
||||
end
|
||||
|
||||
-- Resolve a textual reference `str` (a dotted Eolian name, optionally
-- ending in a member / .get / .set suffix) into a namespace path: an
-- array of lowercased components prefixed with the declaration kind.
-- When `root` is not nil, a boolean root flag is appended.
-- Raises on references that cannot be resolved.
M.ref_get = function(str, root)
    -- direct declaration hit: kind prefix plus the lowercased name parts
    local decl = eolian.declaration_get_by_name(str)
    if decl then
        local parts = { decl_to_nspace(decl) }
        for comp in str:gmatch("[^%.]+") do
            parts[#parts + 1] = comp:lower()
        end
        if root ~= nil then parts[#parts + 1] = not not root end
        return parts
    end

    -- field or func: strip the last dotted component and retry on the base
    local base = str:match("(.+)%.[^.]+")
    if not base then
        error("invalid reference '" .. str .. "'")
    end

    local suffix = str:sub(#base + 1)

    decl = eolian.declaration_get_by_name(base)
    if decl then
        local dt = eolian.declaration_type
        local dtp = decl:type_get()
        if dtp == dt.STRUCT or dtp == dt.ENUM then
            -- TODO: point to the actual item
            return M.ref_get(base, root)
        end
    end

    local klass = M.Class.by_name_get(base)
    local func
    local ftp = M.Function.UNRESOLVED
    if not klass then
        -- not a class: maybe a property accessor (Class.prop.get/.set)
        if suffix == ".get" then
            ftp = M.Function.PROP_GET
        elseif suffix == ".set" then
            ftp = M.Function.PROP_SET
        end
        if ftp ~= M.Function.UNRESOLVED then
            local mem = base:match(".+%.([^.]+)")
            if not mem then
                error("invalid reference '" .. str .. "'")
            end
            base = base:match("(.+)%.[^.]+")
            klass = M.Class.by_name_get(base)
            if klass then
                func = klass:function_get_by_name(mem, ftp)
            end
        end
    else
        -- class member: suffix minus its leading dot names the function
        func = klass:function_get_by_name(suffix:sub(2), ftp)
        if func then ftp = func:type_get() end
    end

    if not func or not func:type_str_get() then
        error("invalid reference '" .. str .. "'")
    end

    -- recurse for the class path, then append the member components
    local ret = M.ref_get(base)
    ret[#ret + 1] = func:type_str_get()
    ret[#ret + 1] = func:name_get():lower()
    if root ~= nil then ret[#ret + 1] = not not root end
    return ret
end
|
||||
|
||||
M.scan_directory = function(dir)
|
||||
if not dir then
|
||||
|
|
|
@ -42,26 +42,6 @@ M.rm_root = function()
|
|||
cutil.file_rmrf(M.path_join(doc_root, "auto"))
|
||||
end
|
||||
|
||||
-- Split `str` on every occurrence of the literal (non-pattern) delimiter
-- `delim`. Returns nil when `str` is nil, { str } when the delimiter never
-- occurs, and otherwise an array of the pieces; empty pieces are kept.
M.str_split = function(str, delim)
    if not str then
        return nil
    end
    -- plain find (4th arg true) so delim is not treated as a Lua pattern
    local first, last = str:find(delim, 1, true)
    if not first then
        return { str }
    end
    local pieces = {}
    repeat
        pieces[#pieces + 1] = str:sub(1, first - 1)
        str = str:sub(last + 1)
        first, last = str:find(delim, 1, true)
        if not first then
            -- no further delimiter: the remainder is the final piece
            pieces[#pieces + 1] = str
        end
    until not first
    return pieces
end
|
||||
|
||||
M.init = function(root, rns)
|
||||
doc_root = root:gsub(rep_sep, path_sep)
|
||||
root_ns = rns
|
||||
|
|
|
@ -359,93 +359,50 @@ M.Writer = util.Object:clone {
|
|||
return self
|
||||
end,
|
||||
|
||||
-- Emit paragraph text with docgen inline markup translated to the wiki
-- output format: $word becomes monospace, @Name.Path becomes a link,
-- %% is nowiki-escaped, and backslash escapes \@ and \$. The whole
-- paragraph is wrapped in %% raw-mode markers.
write_par_markup = function(self, str)
    self:write_raw("%%")
    local nextc = str:gmatch(".")
    local ch = nextc()
    while ch do
        if ch == "\\" then
            -- escape: only \@ and \$ swallow the backslash itself
            ch = nextc()
            if ch ~= "@" and ch ~= "$" then
                self:write_raw("\\")
            end
            self:write_raw(ch)
            ch = nextc()
        elseif ch == "$" then
            -- $identifier -> monospace span
            ch = nextc()
            if ch and ch:match("[a-zA-Z_]") then
                local word = { ch }
                ch = nextc()
                while ch and ch:match("[a-zA-Z0-9_]") do
                    word[#word + 1] = ch
                    ch = nextc()
                end
                self:write_raw("%%''" .. table.concat(word) .. "''%%")
            else
                -- lone dollar sign: emit literally
                self:write_raw("$")
            end
        elseif ch == "@" then
            -- @Dotted.Name -> cross-reference link
            ch = nextc()
            if ch and ch:match("[a-zA-Z_]") then
                local refb = { ch }
                ch = nextc()
                while ch and ch:match("[a-zA-Z0-9_.]") do
                    refb[#refb + 1] = ch
                    ch = nextc()
                end
                -- a trailing dot belongs to the sentence, not the ref
                local trail_dot = false
                if refb[#refb] == "." then
                    trail_dot = true
                    refb[#refb] = nil
                end
                local title = table.concat(refb)
                self:write_raw("%%")
                self:write_link(dtree.ref_get(title, true), title)
                self:write_raw("%%")
                if trail_dot then
                    self:write_raw(".")
                end
            else
                -- lone at sign: emit literally
                self:write_raw("@")
            end
        elseif ch == "%" then
            -- %% must be nowiki-escaped so it is not eaten by the wiki
            ch = nextc()
            if ch == "%" then
                ch = nextc()
                self:write_raw("%%<nowiki>%%</nowiki>%%")
            else
                self:write_raw("%")
            end
        else
            self:write_raw(ch)
            ch = nextc()
        end
    end
    self:write_raw("%%")
    return self
end,
|
||||
|
||||
write_par = function(self, str)
|
||||
local tokp = dtree.DocTokenizer(str)
|
||||
local notetypes = M.has_feature("notes") and {
|
||||
["Note: "] = "<note>\n",
|
||||
["Warning: "] = "<note warning>\n",
|
||||
["Remark: "] = "<note tip>\n",
|
||||
["TODO: "] = "<note>\n**TODO:** "
|
||||
[tokp.MARK_NOTE] = "<note>\n",
|
||||
[tokp.MARK_WARNING] = "<note warning>\n",
|
||||
[tokp.MARK_REMARK] = "<note tip>\n",
|
||||
[tokp.MARK_TODO] = "<note>\n**TODO:** "
|
||||
} or {}
|
||||
local tag
|
||||
for k, v in pairs(notetypes) do
|
||||
if str:match("^" .. k) then
|
||||
tag = v
|
||||
str = str:sub(#k + 1)
|
||||
break
|
||||
end
|
||||
end
|
||||
if tag then
|
||||
local hasraw, hasnote = false, false
|
||||
while tokp:tokenize() do
|
||||
local tp = tokp:type_get()
|
||||
if notetypes[tp] then
|
||||
self:write_raw(tag)
|
||||
self:write_par_markup(str)
|
||||
self:write_raw("\n</note>")
|
||||
hasnote = true
|
||||
else
|
||||
self:write_par_markup(str)
|
||||
if not hasraw then
|
||||
self:write_raw("%%")
|
||||
hasraw = true
|
||||
end
|
||||
if tp == tokp.REF then
|
||||
local reft = tokp:ref_get(true)
|
||||
local str = tokp:text_get()
|
||||
if str:sub(1, 1) == "[" then
|
||||
str = str:sub(2, #str - 1)
|
||||
end
|
||||
self:write_raw("%%")
|
||||
self:write_link(reft, str)
|
||||
self:write_raw("%%")
|
||||
else
|
||||
local str = tokp:text_get()
|
||||
assert(str, "internal tokenizer error")
|
||||
-- replace possible %% chars
|
||||
str = str:gsub("%%%%", "%%%%<nowiki>%%%%</nowiki>%%%%")
|
||||
if tp == tokp.MARKUP_MONOSPACE then
|
||||
self:write_raw("%%''" .. str .. "''%%")
|
||||
else
|
||||
self:write_raw(str)
|
||||
end
|
||||
end
|
||||
end
|
||||
end
|
||||
self:write_raw("%%")
|
||||
if hasnote then
|
||||
self:write_raw("\n</note>")
|
||||
end
|
||||
return self
|
||||
end,
|
||||
|
|
Loading…
Reference in New Issue