dotfiles

Personal dotfiles.
git clone anongit@rnpnr.xyz:dotfiles.git
Log | Files | Refs | Feed | Submodules

highlight.lua (2946B)


-- Module table; consumers populate M.keywords (tag -> style map)
-- before a window is opened.
local M = {}

-- Lazily-built LPeg pattern matching any configured keyword; cached on
-- first use in get_keywords.
-- NOTE(review): never invalidated — changing M.keywords after the first
-- lex has no effect. Confirm this is intentional.
local lpeg_pattern
      4 local get_keywords = function(win, range_or_text)
      5 	if not lpeg_pattern then
      6 		local lpeg = vis.lpeg
      7 		local keywords = M.keywords
      8 
      9 		-- TODO: can't this be done better?
     10 		local words
     11 		for tag, _ in pairs(keywords) do
     12 			if words then
     13 				words = words + lpeg.P(tag)
     14 			else
     15 				words = lpeg.P(tag)
     16 			end
     17 		end
     18 		if not words then return end
     19 		local cap = (1 - words)^0 * (lpeg.Cp() * words * lpeg.Cp())
     20 		lpeg_pattern = lpeg.Ct(cap * ((1 - words) * cap)^0)
     21 	end
     22 
     23 	local txt = range_or_text
     24 	if type(range_or_text) == 'table' then
     25 		txt = win.file:content(range_or_text)
     26 	end
     27 
     28 	local kws = lpeg_pattern:match(txt)
     29 	if not kws then return end
     30 
     31 	local i, kwt = 1, {}
     32 	repeat
     33 		local kw = txt:sub(kws[i], kws[i + 1] - 1)
     34 		table.insert(kwt, {kws[i], kws[i + 1] - 1, kw})
     35 		i = i + 2
     36 	until (i > #kws)
     37 	return kwt
     38 end
     39 
-- Memoization of the most recent lex() input and its rewritten token
-- list, so re-lexing identical data skips the keyword merge entirely.
-- NOTE(review): these are shared module-wide, not per window/lexer —
-- two windows lexing different buffers would thrash this cache; verify
-- that is acceptable for the intended single-view usage.
local last_data
local last_modified_toks
-- WIN_OPEN handler: wrap the window's scintillua lexer so occurrences
-- of M.keywords are emitted as their own tags (styled per M.keywords),
-- splitting the surrounding lexer tokens where necessary.
-- Does nothing when no keywords are configured, the window has no
-- syntax, or the lexer cannot be loaded.
local wrap_lexer = function(win)
	if not M.keywords then return end
	if not win.syntax or not vis.lexers.load then return end

	local vlexer = vis.lexers.load(win.syntax, nil, true)
	if not vlexer or not vlexer.lex then return end
	local old_lex_func = vlexer.lex

	-- Append new tags to lexer
	for tag, style in pairs(M.keywords) do
		local tid = vlexer._TAGS[tag]
		if not tid then
			-- NOTE: _TAGS needs to be ordered and _TAGS[tag] needs
			-- to equal the numerical table index of tag in _TAGS
			-- why? ask the scintillua authors ¯\_(ツ)_/¯
			table.insert(vlexer._TAGS, tag)
			tid = #vlexer._TAGS
			vlexer._TAGS[tag] = tid
			-- tag ids beyond STYLE_LEXER_MAX cannot be styled
			assert(tid < win.STYLE_LEXER_MAX)
		end
		win:style_define(tid, style)
	end

	-- Replacement lex(): run the original lexer, then splice keyword
	-- tokens into its output. The token list alternates tag name and
	-- end position: {tag1, end1, tag2, end2, ...} (as indexed below).
	vlexer.lex = function(lexer, data, index)
		local tokens = old_lex_func(lexer, data, index)
		local new_tokens = {}
		local kwt
		-- Serve the cached rewrite when lexing identical data again.
		if last_data ~= data then
			kwt = get_keywords(win, data)
			-- No keywords present: pass the original tokens through.
			if not kwt then return tokens end
			last_data = data
		else
			return last_modified_toks
		end

		-- i walks the original token list two entries at a time;
		-- for each keyword occurrence, copy tokens that end before
		-- it, then split the token containing it.
		local i = 1
		for _, kwp in ipairs(kwt) do repeat
			if i > #tokens - 1 then break end
			local token_type = tokens[i]
			-- previous token's end doubles as this token's start
			local token_start = (tokens[i - 1] or 1) - 1
			local token_end = tokens[i + 1]
			local kws = kwp[1]
			local kwe = kwp[2]

			if token_end < kws then
				-- token lies entirely before the keyword: copy as-is
				table.insert(new_tokens, token_type)
				table.insert(new_tokens, token_end)
				i = i + 2
			else
				-- if kw is within token we need to split
				-- the initial part of token off
				if kws - 1 > token_start then
					table.insert(new_tokens, token_type)
					table.insert(new_tokens, kws)
				end
				-- emit the keyword itself as its own tag
				table.insert(new_tokens, kwp[3])
				if token_end < kwe then
					-- keyword spans past this token: consume it
					table.insert(new_tokens, token_end + 1)
					i = i + 2
				else
					table.insert(new_tokens, kwe + 1)
				end
			end
		until (not token_end or token_end >= kwe) end
		-- copy over remaining tokens
		for j = i, #tokens, 1 do
			table.insert(new_tokens, tokens[j])
		end
		last_modified_toks = new_tokens
		return new_tokens
	end
end
    114 
-- Install the lexer wrapper each time a window opens.
vis.events.subscribe(vis.events.WIN_OPEN, wrap_lexer)

return M