upload to git
This commit is contained in: commit 8b4f32bc9f

R2H2.lua (new file, 227 lines)
@@ -0,0 +1,227 @@
-- functions

-- Prints a two-row position ruler beneath a source string: the first row holds
-- the units digit of every 1-based index, the second row the tens digit.
function PrintPosition(str)
    local print_text = ""
    for i = 1, string.len(str) do
        while i >= 10 do i = i - 10 end
        print_text = print_text .. tostring(i)
    end
    print("pos: " .. print_text)
    print_text = ""
    for i = 1, string.len(str) do
        i = math.floor(i/10)
        while i >= 10 do i = i - 10 end
        if i == 0 then i = " " end
        print_text = print_text .. tostring(i)
    end
    print("     " .. print_text)
end

-- DATA
-- vowels
vowel_table = {"a","e","i","o","u"}
-- glyphs for each vowel, parallel to vowel_table (syllable-initial / extending forms)
symbol_vowel = {"","","","",""}
symbol_extending_vowel = {"","","","",""}

-- consonants
consonant_table = {"g","sh","r","ny","ch","n","y","f","t","k","w","l","p","b","d","h"}
symbol_consonant = {"","","","","","","","","","","","","","","",""}
symbol_extending_consonant = {"","","","","","","","","","","","","","","",""}

-- composites
composing_consonant_table = {"g","sh","r","ny","ch","m","y","f","t","k","w","l","p","b","d","h"}
-- symbol_composite[vowel_id][cons_id]: one row per vowel, one column per consonant
symbol_composite = {
    {"","","","","","","","","","","","","","","",""},
    {"","","","","","","","","","","","","","","",""},
    {"","","","","","","","","","","","","","","",""},
    {"","","","","","","","","","","","","","","",""},
    {"","","","","","","","","","","","","","","",""}
}
-- program start
function convertToHeonian(text, ...)
    local step = true -- this is set to false when a conclusion has been reached
    local transcribed = false
    local transcribed_text = ""

    -- verbose?
    -- (collect the varargs into a table; the implicit `arg` table is gone in modern Lua)
    local debug = false
    for _, v in ipairs({...}) do
        if v == "-v"
        or v == "--verbose"
        then
            debug = true
        end
    end

    -- check whether it's necessary to check anything at all
    if text == "" then
        step = false
    end

    -- prepare text
    if step ~= false then
        -- 1. add a syllable marker at the start if it isn't already present.
        if string.sub(text,1,1) ~= "." then text = "." .. text end
        -- 2. turn [x] into [ksh]
        text = string.gsub(text,"x","ksh")
        -- 3. turn [z] into [d.sh]
        text = string.gsub(text,"z","d.sh")
    end
    -- read input and transcribe
    if step == true then

        -- debug log
        if debug then print("") end
        if debug then print("src: ".. text) end
        if debug then PrintPosition(text) end

        -- sort through all the letters
        local i = 1
        while i < string.len(text)+1 do
            -- know the current pos
            local char_step = true -- this is false when a conclusion has been reached
            local pos = i
            local debug_s = ""
            local new_syllable = false

            -- skip whitespace and hyphens
            if string.sub(text,i,i) == " "
            or string.sub(text,i,i) == "\t"
            or string.sub(text,i,i) == "-" then
                -- adjust i
                i = i + 1
                pos = i
                char_step = false
            end

            -- init checkup
            if string.sub(text,i,i) == "." then
                -- debug log
                if debug then print("") end
                if debug then print(" @[".. tostring(i).."]"..debug_s.." new syllable MARKER found") end
                -- start syllable
                new_syllable = true
                debug_s = ""
                -- adjust i
                i = i + 1
                -- debug log
                if debug then print(" >>> adjusting by (1) from [".. pos .. "] to [".. i .. "]" ) end
                -- adjust pos
                pos = i
            end

            -- debug log
            if debug then print("") end
            if debug then print(" @[".. tostring(i).."]"..debug_s.." checking string: ".. string.sub(text,i,i)) end
            -- let's check if it is a composite
            if char_step == true then
                local cons_id = 0
                local length = 0
                -- check if it's a valid consonant for a composite
                for _, consonant in pairs(composing_consonant_table) do
                    cons_id = cons_id + 1
                    -- get the length of the consonant we're checking against, so we can compare properly.
                    length = string.len(consonant)
                    -- debug log
                    --if debug then print(" checking composite consonant: " .. composing_consonant_table[cons_id]) end
                    if string.sub(text,i,i+length-1) == consonant then
                        -- debug log
                        if debug then print(" (!) valid consonant: " .. composing_consonant_table[cons_id]) end
                        -- check if there is a valid vowel AFTER the valid consonant, sorting through all vowels
                        local vowel_id = 0
                        for _, vowel in pairs(vowel_table) do
                            vowel_id = vowel_id + 1
                            --if debug then print(" checking composite: " .. composing_consonant_table[cons_id]..vowel_table[vowel_id]) end
                            if string.sub(text,i+length,i+length) == vowel then
                                -- adjust by consonant length + vowel
                                i = i + string.len(consonant) + 1
                                -- debug log
                                if debug then print(" (!) valid composite: " .. consonant .. vowel ..", length: "..length+1) end
                                if debug then print(" >>> adjusting by (" .. tostring(length+1) .. ") from [".. pos .. "] to [".. i .. "]" ) end
                                -- transcribe; conclude;
                                transcribed_text = transcribed_text .. symbol_composite[vowel_id][cons_id]
                                char_step = false
                                break
                            end
                        end
                        -- no need to check more consonants if one is valid
                        break
                    end
                end
                if debug then if char_step ~= false then print(" [!] invalid composite") end end
            end
            -- let's check if it is a non-composite vowel
            if char_step == true then
                local id = 0
                local length = 0
                for _, vowel in pairs(vowel_table) do
                    id = id + 1
                    -- get the length of the vowel we're checking against, so we can compare properly.
                    length = string.len(vowel)
                    -- debug log
                    --if debug then print(" checking standalone vowel: " .. vowel_table[id]) end
                    if string.sub(text,i,i+length-1) == vowel then
                        i = i + string.len(vowel)
                        -- debug log
                        if debug then print(" (!) valid vowel: " .. vowel_table[id]) end
                        if debug then print(" >>> adjusting by (" .. tostring(length) .. ") from [".. pos .. "] to [".. i .. "]" ) end
                        -- transcribe; conclude;
                        -- (named `symbols` rather than `table` to avoid shadowing the table library)
                        local symbols = nil
                        if new_syllable then
                            symbols = symbol_vowel
                        else
                            symbols = symbol_extending_vowel
                        end
                        transcribed_text = transcribed_text .. symbols[id]
                        char_step = false
                        break
                    end
                end
            end
            -- let's check if it is a non-composite consonant
            if char_step == true then
                local id = 0
                local length = 0
                for _, consonant in pairs(consonant_table) do
                    id = id + 1
                    -- get the length of the consonant we're checking against, so we can compare properly.
                    length = string.len(consonant)
                    -- debug log
                    --if debug then print(" checking standalone consonant: " .. consonant_table[id]) end
                    if string.sub(text,i,i+length-1) == consonant then
                        i = i + string.len(consonant)
                        -- debug log
                        if debug then print(" (!) valid consonant: " .. consonant_table[id]) end
                        if debug then print(" >>> adjusting by (" .. tostring(length) .. ") from [".. pos .. "] to [".. i .. "]" ) end
                        -- transcribe; conclude;
                        local symbols = nil
                        if new_syllable then
                            symbols = symbol_consonant
                        else
                            symbols = symbol_extending_consonant
                        end
                        transcribed_text = transcribed_text .. symbols[id]
                        char_step = false
                        break
                    end
                end
            end
            -- if no idea, move on
            if char_step == true then
                -- debug log
                if debug then print(" [!] no idea; moving on to the next [".. pos + 1 .."]") end
                -- no idea
                transcribed_text = transcribed_text .. "?"
                i = i + 1
                -- debug log
                if debug then print(" >>> adjusting by (1) from [".. pos .. "] to [".. i .. "]" ) end
            end
        end
    end

    -- output
    -- (returned at function level so an empty input yields "" rather than nil)
    return transcribed_text
end
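
A quick usage sketch, assuming R2H2.lua is on package.path; the input words here are hypothetical examples, and the returned string is built from the symbol tables above:

    require "R2H2"
    print(convertToHeonian("nyan.pash"))           -- a leading "." syllable marker is added automatically
    print(convertToHeonian("mi.wa", "--verbose"))  -- "-v"/"--verbose" traces every position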

color.lua (new file, 34 lines)
@@ -0,0 +1,34 @@
-- Builds a reverse lookup: after Enum, tbl["Name"] gives that name's 1-based index.
function Enum(tbl)
    for i = 1, #tbl do
        local v = tbl[i]
        tbl[v] = i
    end
    return tbl
end

COLOR = Enum {
    "Black",
    "Red",
    "Green",
    "Yellow",
    "Blue",
    "Purple",
    "Cyan",
    "LightGray",
    "Gray",
    "HighRed",
    "HighGreen",
    "HighYellow",
    "HighBlue",
    "HighPurple",
    "HighCyan",
    "White"
}

function colorText(Color, Text)
    return "\027[38;5;"..tostring(Color-1).."m"..Text.."\027[0m"
end

function colorTextBackground(Color, Text)
    return "\027[48;5;"..tostring(Color-1).."m"..Text.."\027[0m"
end
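
How the two helpers compose, as a short sketch: COLOR values are 1-based, so Color-1 yields the ANSI 256-color index (COLOR.HighBlue is 13, giving index 12):

    print(colorText(COLOR.HighBlue, "foreground"))
    print(colorTextBackground(COLOR.Black, colorText(COLOR.Green, "green on black")))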

lexicon (new executable file, 29 lines)
@@ -0,0 +1,29 @@
#!/usr/bin/env lua

if not arg[1] then print("no argument, try again") return end

require "R2H2"
require "color"

adjustTableSizes = require "output_sizes"
printOutputTable = require "output_word"

showList = require "show_list"
searchList = require "search_list"

words = dofile("words.lua")

if arg[1] == "all" then
    showList()
    return
end
if arg[1] == "search" then
    if arg[2] then searchList(arg[2])
    else print("insert query") end
    return
end
if arg[1] == "transcript" then
    if arg[2] then print(convertToHeonian(arg[2]))
    else print("no string found") end
    return
end
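
Invocation sketch (run from the repository root so the require calls and dofile("words.lua") resolve; the query and word are hypothetical):

    ./lexicon all                  # print the whole word list
    ./lexicon search fern          # list entries matching "fern" in any field
    ./lexicon transcript mi.wa     # print the Heonian transcription of a romanized string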

output_sizes.lua (new file, 29 lines)
@@ -0,0 +1,29 @@
return function(tbl)
    local words_max_length = {1,1,1,1,1,1}

    -- find the widest entry in every column; column 1 is measured on the
    -- Heonian transcription of the word itself
    for i=1, #tbl do
        local this_word = tbl[i][2]
        for j=1, #this_word-1 do
            local length = string.len(this_word[j])
            if length + 1 > words_max_length[j+1] then
                words_max_length[j+1] = length + 1
            end
        end
        local length = string.len(convertToHeonian(this_word[1]))
        if length + 1 > words_max_length[1] then
            words_max_length[1] = length + 1
        end
    end

    -- pad every column to its maximum width
    -- (iterates tbl rather than the global words table, so only the rows
    -- actually being printed are padded)
    for i=1, #tbl do
        local this_word = tbl[i][2]
        for j=1, #this_word-1 do
            local times = words_max_length[j+1] - string.len(this_word[j])
            while times > 0 do
                this_word[j] = this_word[j] .. " "
                times = times - 1
            end
            this_word[j] = this_word[j] .. " "
        end
    end

    return tbl
end
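
A minimal sketch of the padding pass (hypothetical row; R2H2.lua must be loaded first, since column 1 is measured through convertToHeonian):

    require "R2H2"
    local adjustTableSizes = require "output_sizes"
    -- each row is {index, entry}; every field but the last is padded in place
    local rows = { {1, {"fich","fish","noun","","from fish",""}} }
    rows = adjustTableSizes(rows)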

output_word.lua (new file, 21 lines)
@@ -0,0 +1,21 @@
return function(tbl)
    for i=1, #tbl do
        local this_number = tbl[i][1]
        local this_word = tbl[i][2]
        local text = colorTextBackground(COLOR.Black, "#")
        -- zero-pad the entry number to the width of the largest index
        local length = string.len(tostring(#words)) - string.len(tostring(this_number))
        while length > 0 do
            text = text .. colorTextBackground(COLOR.Black, "0")
            length = length - 1
        end
        text = text .. colorTextBackground(COLOR.Black, this_number .. " ")
        text = text .. colorTextBackground(COLOR.Black,colorText(COLOR.HighBlue,this_word[1]))
        text = text .. colorTextBackground(COLOR.Black,colorText(COLOR.Gray,this_word[3]))
        text = text .. colorTextBackground(COLOR.Black,colorText(COLOR.Green,this_word[2]))

        print(text)
        text = ""
        text = text .. colorTextBackground(COLOR.Black,colorText(COLOR.Blue,this_word[4]))
        text = text .. colorTextBackground(COLOR.Black,colorText(COLOR.Blue,this_word[5]))
        print(text) -- print the second line too; it was built but never emitted
    end
end

search_list.lua (new file, 18 lines)
@@ -0,0 +1,18 @@
return function(query)
    local results_table = {}
    for i=1, #words do
        local exit = true
        for j=1, #words[i]-1 do
            if string.find(words[i][j], query) then
                exit = false
                break
            end
        end
        if not exit then
            local word = {i,words[i]}
            table.insert(results_table,word)
        end
    end
    results_table = adjustTableSizes(results_table)
    printOutputTable(results_table)
end

show_list.lua (new file, 9 lines)
@@ -0,0 +1,9 @@
return function()
    local results_table = {}
    for i=1, #words do
        local word = {i,words[i]}
        table.insert(results_table,word)
    end
    results_table = adjustTableSizes(results_table)
    printOutputTable(results_table)
end

words.lua (new file, 292 lines)
@@ -0,0 +1,292 @@
local t = {}
table.insert(t,{"-el","bringer of","suffix","demonic","ariel","used in demonic names."})
table.insert(t,{"-laeso","feed from","suffix","demonic","",""})
table.insert(t,{"-sh","intention of","suffix","umyr intention/power","","used in myrean names."})
table.insert(t,{"a.bae","young","modifier","","",""})
table.insert(t,{"a.e mo?","isn't it right","expression","","",""})
table.insert(t,{"a.ga.ka","at, towards, for","marker","","","also used like"})
table.insert(t,{"a.pat.ku","to lie (on a surface)","verb","","",""})
table.insert(t,{"a.ra","energy","noun","","",""})
table.insert(t,{"a.ra.un","construct","noun","energy + being","",""})
table.insert(t,{"a.ri","willpower, motivation, desire","noun","demonic","ariel",""})
table.insert(t,{"a.ri.lae","demon","noun","","",""})
table.insert(t,{"a.wa.te","short","modifier","","",""})
table.insert(t,{"aa","correct","modifier","","",""})
table.insert(t,{"an","path, step","noun","","fern's name",""})
table.insert(t,{"an.ko","thank you","expression","","@gamer@bungle.online",""})
table.insert(t,{"an.ku","to walk, to experience","verb","","fern's name",""})
table.insert(t,{"an.me","anime","noun","","",""})
table.insert(t,{"ba","you","pronoun","","",""})
table.insert(t,{"ba.bii.ku","to count","verb","","",""})
table.insert(t,{"ba.ku","to swim","verb","","",""})
table.insert(t,{"ba.lo","hand","noun","","",""})
table.insert(t,{"ba.pa.ba.pa.ku","to rub, to hug","verb","","",""})
table.insert(t,{"ba.pa.ku","to hold","verb","","",""})
table.insert(t,{"bae","small","modifier","","",""})
table.insert(t,{"bae.la.pa","egg","noun","","",""})
table.insert(t,{"bae.ma.ra","yesterday","noun","","",""})
table.insert(t,{"bar.an","grass","noun","","",""})
table.insert(t,{"be.nyui","bunny","noun","","",""})
table.insert(t,{"be.taf","big","modifier","","",""})
table.insert(t,{"beg","many, big quantity","modifier","","",""})
table.insert(t,{"bi.men","bird","noun","","",""})
table.insert(t,{"bo.ku","to sew","verb","","",""})
table.insert(t,{"bokuch","clothes, anything sewn","noun","","",""})
table.insert(t,{"bu.lu.ku","to kill","verb","","",""})
table.insert(t,{"cha.we","narrow","modifier","","",""})
table.insert(t,{"chib","tongue","noun","","",""})
table.insert(t,{"chin","nose","noun","","",""})
table.insert(t,{"chind","round","modifier","","",""})
table.insert(t,{"cho.ku","to swell","verb","","",""})
table.insert(t,{"chu","denotes the subject","marker","","",""})
table.insert(t,{"chu.ku","to write","verb","","",""})
table.insert(t,{"d.shen.ish","god of change","noun","umyr great + movement + power, d + shen + ish","from old personal minecraft lore",""})
table.insert(t,{"da.ka","dragon","noun","","sounds cool enough",""})
table.insert(t,{"da.ka.fe.lish","cat","noun","dragon companion","@lunadragofelis@embracing.space","casual way"})
table.insert(t,{"da.ka.un","dragonborn","noun","","",""})
table.insert(t,{"da.ti","dirty","modifier","","",""})
table.insert(t,{"de.da.la","cold","noun","","",""})
table.insert(t,{"dee","straight","modifier","","",""})
table.insert(t,{"di.ra.fow","deerfox","noun","","",""})
table.insert(t,{"di.rash","fox","noun","","",""})
table.insert(t,{"dia.ku","to live","verb","","",""})
table.insert(t,{"din.di","in, inside","marker","","",""})
table.insert(t,{"do.me","dust","noun","","",""})
table.insert(t,{"dra","indicates question","marker","","",""})
table.insert(t,{"dsho.ei","fate","noun","","fern's name",""})
table.insert(t,{"dsho.ei.an","shared destiny, together","modifier","fate + path","fern's name","walking the same path -> sharing the same fate -> together"})
table.insert(t,{"dsho.ei.an.un","romantic partner, (fated)","noun","fate + path + being","fern's name",""})
table.insert(t,{"du.tan","smoke","noun","","",""})
table.insert(t,{"duch","seed","noun","","",""})
table.insert(t,{"e","which","pronoun","","",""})
table.insert(t,{"e.ma","who","pronoun","","",""})
table.insert(t,{"e.mat","where","pronoun","","",""})
table.insert(t,{"e.nya","when","pronoun","","",""})
table.insert(t,{"e.o","how","pronoun","","",""})
table.insert(t,{"e.wa.la","root","noun","","",""})
table.insert(t,{"e.yu","what","pronoun","","",""})
table.insert(t,{"ee","new, yes","modifier, expression","","",""})
table.insert(t,{"el.fu","animal","noun","","",""})
table.insert(t,{"en.te","duck","noun","","",""})
table.insert(t,{"ep.shi","leaf","noun","","",""})
table.insert(t,{"esh.dre","moon, secret","noun","demonic","esdreel",""})
table.insert(t,{"fa","not","modifier","","",""})
table.insert(t,{"fe.la","dog","noun","heon companion, fe.li","",""})
table.insert(t,{"fe.li","companion","noun","","from dakafelish, cat",""})
table.insert(t,{"fe.rab","forest","noun","","",""})
table.insert(t,{"fern","river","noun","","fern's name",""})
table.insert(t,{"fi.lo","flower","noun","","",""})
table.insert(t,{"fich","fish","noun","","from fish",""})
table.insert(t,{"fin.a","finger","noun","","",""})
table.insert(t,{"fiu.ku","to blow","verb","","",""})
table.insert(t,{"fo.ri","hair","noun","","",""})
table.insert(t,{"fow","deer","noun","","from fox in deerfox",""})
table.insert(t,{"fu.fo","belly","noun","","",""})
table.insert(t,{"fu.ra","full, whole","modifier","","",""})
table.insert(t,{"ga.da.la","warm","modifier","","",""})
table.insert(t,{"ga.la","far","modifier","","",""})
table.insert(t,{"ga.le","stick","noun","","",""})
table.insert(t,{"ga.mat","there","pronoun","","",""})
table.insert(t,{"ga.o","that","pronoun","","",""})
table.insert(t,{"gan.ku","to go there","verb","","",""})
table.insert(t,{"ge.shi.ku","to sit","verb","","",""})
table.insert(t,{"grak.ma.dash","god of justice","noun","umyr fire + justice + power, grak + mada + ish","from old personal minecraft lore","grakmadaz is the deity that makes justice upon the world."})
table.insert(t,{"gu.gu","ear","noun","","",""})
table.insert(t,{"gu.gun.ku","to hear","verb","","",""})
table.insert(t,{"gu.me","sand","noun","","",""})
table.insert(t,{"gu.ra.ki.ku","to burn","verb","","",""})
table.insert(t,{"gu.ra.ton","green","modifier","","",""})
table.insert(t,{"gu.rak","fire","noun","","",""})
table.insert(t,{"ha.mi","heavy","modifier","","",""})
table.insert(t,{"hansh","failure","noun","","hans don't be mean to yourself :c",""})
table.insert(t,{"heag.ku","to vomit","verb","","",""})
table.insert(t,{"ho.ku","to freeze","verb","","",""})
table.insert(t,{"ho.wi.to","ice","noun","","",""})
table.insert(t,{"hu.ma","few, small quantity","modifier","","",""})
table.insert(t,{"hun","left","noun, modifier","","",""})
table.insert(t,{"i.fa","fat","noun","","",""})
table.insert(t,{"i.ke","bad","modifier","","",""})
table.insert(t,{"i.ma","he, she, they, it","pronoun","","",""})
table.insert(t,{"i.pa","road","noun","","",""})
table.insert(t,{"i.pa.li","femininity","noun","","","not quite femininity as we understand it, but a more distanced and stranger interpretation of it."})
table.insert(t,{"i.shi","beauty","noun","","@sugarbell aka isi",""})
table.insert(t,{"i.shi.ton","pink","noun","heonian beauty + color, ishi + ton","","pink is pretty."})
table.insert(t,{"i.shu.ku","to float","verb","","",""})
table.insert(t,{"ier","hope","noun","","",""})
table.insert(t,{"k.yu","with <someone>","marker","","",""})
table.insert(t,{"ka.la","near","modifier","","",""})
table.insert(t,{"ka.mat","here","pronoun","","",""})
table.insert(t,{"ka.o","this","pronoun","","",""})
table.insert(t,{"ka.we","thin","modifier","","",""})
table.insert(t,{"kan.ku","to come here","verb","","",""})
table.insert(t,{"kash.ku","to break","verb","","",""})
table.insert(t,{"ki.ku","to say, to chat","verb","","",""})
table.insert(t,{"ku","to be","verb","","","basic verbal form."})
table.insert(t,{"ku.ku.ku","to embody","verb","","",""})
table.insert(t,{"kya.ny","long","modifier","","",""})
table.insert(t,{"la.e","spouse","noun","","",""})
table.insert(t,{"la.fen","feather","noun","","",""})
table.insert(t,{"li.ku","to like","verb","umyr love + verb form, li","",""})
table.insert(t,{"li.pa","liver","noun","","",""})
table.insert(t,{"lu.fu","guts","noun","","",""})
table.insert(t,{"lu.ku","to die","verb","","",""})
table.insert(t,{"lu.nya","dawn, dusk","noun","","from dusk, dawn (@lunadragofelis@embracing.space)",""})
table.insert(t,{"lu.nya.ton","orange","noun","","","color of dusk/dawn"})
table.insert(t,{"ma","concenciousness","noun","","",""})
|
||||||
|
table.insert(t,{"ma.chun","examination","noun","","",""})
|
||||||
|
table.insert(t,{"ma.ra","day","noun","","",""})
|
||||||
|
table.insert(t,{"ma.ya.ri","smooth","modifier","","",""})
|
||||||
|
table.insert(t,{"maa.me","if","marker","","",""})
|
||||||
|
table.insert(t,{"me.ku","to dig","verb","","",""})
|
||||||
|
table.insert(t,{"me.lu","blood","noun","","",""})
|
||||||
|
table.insert(t,{"me.lu","blood","noun","","",""})
|
||||||
|
table.insert(t,{"me.lu.lae","louse","noun","","","blood sucker. also derogatory."})
|
||||||
|
table.insert(t,{"me.lu.ton","red","noun","","","color of blood"})
|
||||||
|
table.insert(t,{"me.ta","tail","noun","","",""})
|
||||||
|
table.insert(t,{"mei","snow, safety","noun","heonian","one of my names , i want it to be something pretty, also one of fern's name (safety)",""})
|
||||||
|
table.insert(t,{"mi","time","noun","","",""})
|
||||||
|
table.insert(t,{"mi.gu.ra","daytime","noun","","",""})
|
||||||
|
table.insert(t,{"mi.ku","to put into words","verb","","bincat",""})
|
||||||
|
table.insert(t,{"mi.la.ta","sky","noun","","",""})
|
||||||
|
table.insert(t,{"mi.me","past","noun","","",""})
|
||||||
|
table.insert(t,{"mi.min.ku","to see","veb","","",""})
|
||||||
|
table.insert(t,{"mi.nya","present","noun","","",""})
|
||||||
|
table.insert(t,{"mi.ra.ku","to fear","verb","","",""})
|
||||||
|
table.insert(t,{"mi.sha.ku","to fight","verb","","",""})
|
||||||
|
table.insert(t,{"mi.shi","nighttime","noun","","",""})
|
||||||
|
table.insert(t,{"mi.wa","cat","noun","sounds cats make","",""})
|
||||||
|
table.insert(t,{"mi.we","future","noun","","",""})
|
||||||
|
table.insert(t,{"mimi","eye","noun","","",""})
|
||||||
|
table.insert(t,{"mir.he.on","myrheon (country)","noun","umyr myr + heonian heon, mir + heon","from poem",""})
|
||||||
|
table.insert(t,{"mu.ku","to kiss","verb","","",""})
|
||||||
|
table.insert(t,{"mya","preceeds subordinate clause","marker","","","marker after chu / subject yu, before subordinate clause"})
|
||||||
|
table.insert(t,{"nao.mi","dream","noun","","",""})
|
||||||
|
table.insert(t,{"nao.mi.min.ku","to dream","verb","","",""})
|
||||||
|
table.insert(t,{"nya","hi (informal)","expression","from nyan.pash hello","",""})
|
||||||
|
table.insert(t,{"nya.ba.ku","to tie","verb","","",""})
|
||||||
|
table.insert(t,{"nya.ku","to ask for","verb","","",""})
|
||||||
|
table.insert(t,{"nya.wa","rope","noun","","",""})
|
||||||
|
table.insert(t,{"nyan.pash","hello!, good morning!","expression","","from nyanpasu",""})
|
||||||
|
table.insert(t,{"nyu.sha.ku","to suck","verb","","",""})
|
||||||
|
table.insert(t,{"o.li.dia","goddess of life","noun","umyr care + love + life , o + li + dia","from old personal minecraft lore","olidia is the deity who takes care upon all living creatures."})
|
||||||
|
table.insert(t,{"pa.ku","to stand","verb","","",""})
|
||||||
|
table.insert(t,{"pa.lo","foot","noun","","",""})
|
||||||
|
table.insert(t,{"pa.re","knowledge","noun","","",""})
|
||||||
|
table.insert(t,{"pa.re.ku","to know, to learn","verb","","",""})
|
||||||
|
table.insert(t,{"pa.ru","stupid","modifier","","",""})
|
||||||
|
table.insert(t,{"par.un","human","noun","","",""})
|
||||||
|
table.insert(t,{"paronya","understanding, agreement","noun","","",""})
|
||||||
|
table.insert(t,{"pat","back","noun","","",""})
|
||||||
|
table.insert(t,{"pat.ku","to spit","verb","","",""})
|
||||||
|
table.insert(t,{"pau.me","meat","noun","","",""})
|
||||||
|
table.insert(t,{"pe.fin.a","finernail","noun","","",""})
|
||||||
|
table.insert(t,{"pe.ma","wide","modifier","","",""})
|
||||||
|
table.insert(t,{"pe.ma.mat","mountain","noun","","",""})
|
||||||
|
table.insert(t,{"pe.ri","claws","noun","","",""})
|
||||||
|
table.insert(t,{"pe.ri.ku","to scratch","verb","","",""})
|
||||||
|
table.insert(t,{"pe.wa.mat","sea","noun","","",""})
|
||||||
|
table.insert(t,{"pee.ka.ku","to think, to become aware","verb","","",""})
|
||||||
|
table.insert(t,{"pee.ta.ku","to pull, to remove (something)","verb","","",""})
|
||||||
|
table.insert(t,{"pen.ya","dog","noun","","",""})
|
||||||
|
table.insert(t,{"pew.ku","to turn","verb","","",""})
|
||||||
|
table.insert(t,{"pi.ke.she","rotten","modifier","","",""})
|
||||||
|
table.insert(t,{"pi.she.ku","to drink","verb","","",""})
|
||||||
|
table.insert(t,{"po.me","stone","noun","","",""})
|
||||||
|
table.insert(t,{"po.nya","good","modifier","","",""})
|
||||||
|
table.insert(t,{"pon.me","because","marker","","",""})
|
||||||
|
table.insert(t,{"pu","star","noun","heonian","actually puropu was first",""})
|
||||||
|
table.insert(t,{"pu.ro.pu","sol, sun","noun","heonian pu + ro + pu, star among stars","",""})
|
||||||
|
table.insert(t,{"pu.ro.ton","yellow","modifier","","",""})
|
||||||
|
table.insert(t,{"ra.ya","parent","noun","","",""})
|
||||||
|
table.insert(t,{"re","i","pronoun","","",""})
|
||||||
|
table.insert(t,{"ri","sharp","modifier","","",""})
|
||||||
|
table.insert(t,{"ri.la","horns","noun","demonic","",""})
|
||||||
|
table.insert(t,{"rin","right","noun, modifier","","",""})
|
||||||
|
table.insert(t,{"ro","among","marker","","",""})
|
||||||
|
table.insert(t,{"rop.sha","dull","modifier","","",""})
|
||||||
|
table.insert(t,{"sha","change","noun","demonic","",""})
|
||||||
|
table.insert(t,{"sha.lat","salt, salty","noun, modifier","","",""})
|
||||||
|
table.insert(t,{"sha.pan","other","pronoun","","",""})
|
||||||
|
table.insert(t,{"shaf.ra","elegant","modifier","","",""})
|
||||||
|
table.insert(t,{"shai.ku","to add (math)","verb","","",""})
|
||||||
|
table.insert(t,{"shai.rup","math","noun","","",""})
|
||||||
|
table.insert(t,{"shash.ku","to split, to cut, to divide","verb","","",""})
|
||||||
|
table.insert(t,{"she.ka","mouth","noun","","",""})
|
||||||
|
table.insert(t,{"she.ku","to eat","verb","","",""})
|
||||||
|
table.insert(t,{"she.ma","louse","noun","","",""})
|
||||||
|
table.insert(t,{"she.pa","fruit","noun","","",""})
|
||||||
|
table.insert(t,{"sheb","head","noun","","",""})
|
||||||
|
table.insert(t,{"shen","direction","noun","umyr movement, shen","zenish was first, zen -> dshen -> shen",""})
|
||||||
|
table.insert(t,{"shen.i.sha","west","noun","heon zenish","",""})
|
||||||
|
table.insert(t,{"shen.i.sha.pe.ra","northwest","noun","heon west + north, shen.i.sha + shen.pe.ra","",""})
|
||||||
|
table.insert(t,{"shen.i.sha.ron","southwest","noun","heon west + south, shen.i.sha + shen.o.ron","",""})
|
||||||
|
table.insert(t,{"shen.lu.mo.ro","southeast","noun","heon east + south, shen.i.lun.a + shen.o.ron","",""})
|
||||||
|
table.insert(t,{"shen.lu.nya","east","noun","heon dusk, dawn","",""})
|
||||||
|
table.insert(t,{"shen.lun.pe.ra","northeast","noun","heon east + north, shen.lun.a + shen.pe.ra","",""})
|
||||||
|
table.insert(t,{"shen.o.ron","south","noun","heonian","",""})
|
||||||
|
table.insert(t,{"shen.pe.ra","north","noun","heonian","",""})
|
||||||
|
table.insert(t,{"shi","leg","noun","","",""})
|
||||||
|
table.insert(t,{"shi.de.ton","black","modifier","","",""})
|
||||||
|
table.insert(t,{"shi.fu.mat","snake","noun","","",""})
|
||||||
|
table.insert(t,{"shi.nya","breast","noun","","",""})
|
||||||
|
table.insert(t,{"shi.ra.ku","to have pending","verb","","",""})
|
||||||
|
table.insert(t,{"shin.da","wind","noun","","",""})
|
||||||
|
table.insert(t,{"shin.li","masculitnity","noun","","","not quite masculinity as we understand it, but a more distanced and stranger interpretation of it. "})
|
||||||
|
table.insert(t,{"shon.ku","to smell","verb","","",""})
|
||||||
|
table.insert(t,{"shoo","essence, core, heart","noun","","",""})
|
||||||
|
table.insert(t,{"shoo.ma","spirit","noun","","",""})
|
||||||
|
table.insert(t,{"shoo.me.ku","to bury","verb","","",""})
|
||||||
|
table.insert(t,{"ta.fa.ku","to wipe","verb","","",""})
|
||||||
|
table.insert(t,{"ta.mi.nya","innocence, honesty","noun, modifier","","fern's name","in a way of pure"})
|
||||||
|
table.insert(t,{"ta.te.ti","heart","noun","","",""})
|
||||||
|
table.insert(t,{"te.a.ga.ku","to throw","verb","","",""})
|
||||||
|
table.insert(t,{"te.ba.ku","to squeeze, to hug","verb","","",""})
|
||||||
|
table.insert(t,{"te.cha","skin","noun","","",""})
|
||||||
|
table.insert(t,{"te.ga.me","story","noun","","",""})
|
||||||
|
table.insert(t,{"te.ku","to hurt, to hit","verb","","",""})
|
||||||
|
table.insert(t,{"te.ma","neck","noun","","",""})
|
||||||
|
table.insert(t,{"te.me.lu.ku","to stab and make bleed","verb","","",""})
|
||||||
|
table.insert(t,{"te.yo","knee","noun","","",""})
|
||||||
|
table.insert(t,{"tee.ku","to push, to place (something)","verb","","",""})
|
||||||
|
table.insert(t,{"tek.da","ash","noun","","",""})
|
||||||
|
table.insert(t,{"ti.pa.sha","tree","noun","","",""})
|
||||||
|
table.insert(t,{"ti.pash","bark","noun","","",""})
|
||||||
|
table.insert(t,{"ti.she.ku","to bite","verb","","",""})
|
||||||
|
table.insert(t,{"ti.ta","teeth","noun","","",""})
|
||||||
|
table.insert(t,{"to","and","marker","","",""})
|
||||||
|
table.insert(t,{"tree.ya","ambiguous","modifier","","",""})
|
||||||
|
table.insert(t,{"u.da","old","modifier","","",""})
|
||||||
|
table.insert(t,{"u.ta.ku","to give birth","verb","","",""})
|
||||||
|
table.insert(t,{"u.ta.yan","childbearer","noun","","","they also get to name the children"})
|
||||||
|
table.insert(t,{"u.ti.ku","to create","verb","","",""})
|
||||||
|
table.insert(t,{"u.tia.raun","deity","noun","","",""})
|
||||||
|
table.insert(t,{"u.u","no","expression","","",""})
|
||||||
|
table.insert(t,{"u.wa.ton","white","modifier","","",""})
|
||||||
|
table.insert(t,{"u.wa.wa","bone","noun","","",""})
|
||||||
|
table.insert(t,{"u.we","worm","noun","","",""})
|
||||||
|
table.insert(t,{"un","being","noun","","",""})
|
||||||
|
table.insert(t,{"un.bae","child","noun","","",""})
|
||||||
|
table.insert(t,{"un.be.taf","adult","noun","","",""})
|
||||||
|
table.insert(t,{"wa.ek","thick","modifier","","",""})
|
||||||
|
table.insert(t,{"wa.mat","lake","noun","","",""})
|
||||||
|
table.insert(t,{"wa.to","water","noun","","",""})
|
||||||
|
table.insert(t,{"wa.wo.te","rain","noun","","",""})
|
||||||
|
table.insert(t,{"wan.to","wet","modifier","","",""})
|
||||||
|
table.insert(t,{"we.nya","future","noun","","",""})
|
||||||
|
table.insert(t,{"wi.ki","song","noun","","",""})
|
||||||
|
table.insert(t,{"wi.ki.ku","to sing","verb","","",""})
|
||||||
|
table.insert(t,{"wi.mo.ku","to play","verb","","",""})
|
||||||
|
table.insert(t,{"wi.po.mat","planet, earth","noun","","",""})
|
||||||
|
table.insert(t,{"wi.ta.ku","to wash","verb","","",""})
|
||||||
|
table.insert(t,{"wi.wi.ku","to laugh","verb","","",""})
|
||||||
|
table.insert(t,{"win.ku","to fly","verb","","",""})
|
||||||
|
table.insert(t,{"wind","wings","noun","","",""})
|
||||||
|
table.insert(t,{"wo.k.ku","to fall","verb","","",""})
|
||||||
|
table.insert(t,{"wuu.ga","fog","noun","","",""})
|
||||||
|
table.insert(t,{"ya.ku","to give","verb","","",""})
|
||||||
|
table.insert(t,{"yan.wa","ocean","noun","","",""})
|
||||||
|
table.insert(t,{"yea.mat","area","noun","","",""})
|
||||||
|
table.insert(t,{"yesh","adorable","modifier","","",""})
|
||||||
|
table.insert(t,{"yi.ma","year","noun","","",""})
|
||||||
|
table.insert(t,{"yu","denotes topic, emphasis","marker","","","overwrites subject if they are the same. otherwise, goes after subject marker"})
|
||||||
|
return t
|