Compare commits

...

39 Commits

Author  SHA1  Message  Date
UndeadMaelys  652a1e9cda  adapted and changed to the new words file  2022-06-06 07:13:30 +02:00
Bizcochito  6c54a672f7  fuck go back  2022-05-06 17:56:13 +02:00
Bizcochito  35f900b2b2  now we pull the submodule correctly  2022-05-06 17:53:07 +02:00
Bizcochito  3bca35a1d3  changed so the program updates in evey use with the intalled version  2022-05-06 17:15:37 +02:00
Bizcochito  09a4b4678b  changed so the program updates in evey use with the intalled version  2022-05-06 17:11:06 +02:00
UndeadMaelys  c0c64d1912  fixed last merge  2022-05-05 11:30:20 +02:00
UndeadMaelys  8e7616e490  Merge branch 'master' of ssh.succubi.services:lustlion/conlang-heonian  2022-05-05 11:29:34 +02:00
UndeadMaelys  d4577b988c  spliced content into another repo  2022-05-05 11:29:09 +02:00
Bizcochito  7900084f16  changed stuff in lessons 2  2022-05-04 07:26:52 +02:00
UndeadMaelys  0154cc5e1b  new words!  2022-05-01 15:26:51 +02:00
UndeadMaelys  ba2465a3bf  fixed r2h2, added words  2022-05-01 12:37:26 +02:00
UndeadMaelys  db5155ea5c  added words and created story <:  2022-04-20 15:00:31 +02:00
UndeadMaelys  15ce55f654  added words, started a story  2022-04-18 03:18:43 +02:00
UndeadMaelys  b4948232fd  words  2022-03-31 13:11:58 +02:00
UndeadMaelys  ff602e4ea8  edit words  2022-03-31 10:31:38 +02:00
UndeadMaelys  6491c3ab50  word  2022-03-31 10:30:28 +02:00
UndeadMaelys  c172ef8000  more words  2022-03-31 10:27:22 +02:00
UndeadMaelys  38fb2f7a51  lesson  2022-03-31 10:09:28 +02:00
UndeadMaelys  95c0ce5062  added lessons!  2022-03-25 10:46:30 +01:00
UndeadMaelys  93da415c03  added a way to edit, remove, and add words  2022-03-24 21:47:24 +01:00
UndeadMaelys  39335c7e09  allows to search with id  2022-03-23 02:35:44 +01:00
UndeadMaelys  0d9f64bf58  fixed word  2022-03-22 21:26:51 +01:00
UndeadMaelys  a1a03918ee  fixed word  2022-03-22 21:26:27 +01:00
UndeadMaelys  b8cee98c44  added parameters to limit search to heonian or translation  2022-03-22 21:24:34 +01:00
UndeadMaelys  bd15134c3e  search now ignores syllable separations  2022-03-22 21:11:14 +01:00
UndeadMaelys  9e1e0e603c  changed words; adjusted formatting  2022-03-22 20:38:17 +01:00
UndeadMaelys  caaba4a058  modified words  2022-03-22 20:27:15 +01:00
UndeadMaelys  68dc7a8f19  modified words  2022-03-22 20:26:55 +01:00
UndeadMaelys  f181c7b935  improved help messages  2022-03-22 20:19:27 +01:00
UndeadMaelys  f044bff0a9  remove newline from copy  2022-03-22 19:56:03 +01:00
UndeadMaelys  55df86b084  added copy to clipboard for transcription  2022-03-22 19:48:28 +01:00
UndeadMaelys  0526d6a9f6  fix meta origin showing twice  2022-03-22 18:10:53 +01:00
UndeadMaelys  419b9bd67c  fix help messages  2022-03-22 18:07:46 +01:00
UndeadMaelys  c9ca082f44  changed how handle help and added abreviations  2022-03-22 18:06:01 +01:00
Bizcochito  31bdc665b3  Done the usage help thing, with no actuall good help for now  2022-03-22 17:54:30 +01:00
UndeadMaelys  b785f16e65  fixed extra line, added word  2022-03-22 17:27:45 +01:00
bizcochito  854c9ca2a3  i forgor  2022-03-23 01:25:29 +09:00
UndeadMaelys  8c8a2cdb4b  added notes to the detailed view  2022-03-22 17:23:28 +01:00
UndeadMaelys  46c3c0f297  modified description for word #259 utayan  2022-03-22 17:23:28 +01:00
16 changed files with 715 additions and 367 deletions

3  .gitmodules vendored Normal file

@@ -0,0 +1,3 @@
[submodule "heonian-content"]
path = heonian-content
url = git@ssh.succubi.services:lustlion/heonian-content.git


@@ -1,4 +1,17 @@
--functions
function CopyToClipboard(textString)
outClipFile, err = io.open("clipboardTempFile",'w')
if err then
print("[Error Opening Clipboard Temporary File for Writing]")
return
end
outClipFile:write(textString,'\n')
outClipFile:close()
command = 'cat "' .. "clipboardTempFile" .. '" | xclip -selection clipboard &'
os.execute(command)
os.execute("rm clipboardTempFile")
end
function PrintPosition(string)
local print_text = ""
for i = 1, string.len(string) do
@@ -41,31 +54,32 @@ function convertToHeonian(text)
local transcribed = false
local transcribed_text = ""
-- verbose?
local debug = false
local autocopy = false
for _, v in pairs(arg) do
if v == "-v" or v == "--verbose" then debug = true end
if v == "-c" or v == "--copy" then autocopy = true end
end
-- we check if it's necessary to check anything at all
-- evaluate conditions that cancel the script
if text == "" then
step = false
end
-- if its necessary to run the script, then we continue :D
if step then
-- prepare text
if step ~= false then
-- 1. add syllable marker at start if it isn't already present.
if string.sub(text,1,1) ~= "." then text = "." .. text end
-- 2. turn [x] into [ksh]
text = string.gsub(text,"x","ksh")
-- 3. turn [z] into [dsh]
text = string.gsub(text,"z","d.sh")
end
-- read input and transcribe
if step == true then
-- 1. add syllable marker at start if it isn't already present.
if string.sub(text,1,1) ~= "." then text = "." .. text end
-- 2. turn [x] into [ksh]
text = string.gsub(text,"x","ksh")
-- 3. turn [z] into [dsh]
text = string.gsub(text,"z","d.sh")
-- 4. turn [j] into [y]
text = string.gsub(text,"j","y")
-- read input and transcribe
-- debug log
if debug then print("") end
if debug then print("src: ".. text) end
@@ -74,22 +88,25 @@ function convertToHeonian(text)
-- sort through all the letters
local i = 1
while i < string.len(text)+1 do
-- know current pos
local char_step = true -- this is false when a conclusion has been reached
local pos = i
-- know current pos, since we will be modifying i but still evaluating from the position
local char_step = true
-- this is false once a conclusion has been reached about which symbol the next segment corresponds to
local debug_s = ""
local new_syllable = false
-- debug string
local new_syllable = false
-- this is true when a new syllable begins, and is used to distinguish normal vowels and consonants from trailing ones
if string.sub(text,i,i) == " "
or string.sub(text,i,i) == "\t"
or string.sub(text,i,i) == "-" then
or string.sub(text,i,i) == "-" then -- check if its an unsupported symbol to skip it.
-- adjust i
i = i + 1
pos = i
char_step = false
end
-- init checkup
if string.sub(text,i,i) == "." then
if string.sub(text,i,i) == "." then -- this forces the new syllable, since . is the syllable separator, also skips the symbol and repositions
-- debug log
if debug then print("") end
if debug then print(" @[".. tostring(i).."]"..debug_s.." new syllable MARKER found") end
@@ -106,7 +123,7 @@ function convertToHeonian(text)
-- debug log
if debug then print("") end
if debug then print(" @[".. tostring(i).."]"..debug_s.." checking string: ".. string.sub(text,i,i)) end
if debug then print(" @[".. tostring(i).."]"..debug_s.." checking string: ".. string.sub(text,i,i)) end -- debug print positional info
-- lets check if it is a composite
if char_step == true then
@@ -205,7 +222,7 @@ function convertToHeonian(text)
end
end
-- if no idea, move on
-- if no idea, move on, and just be confused. prints ?
if char_step == true then
-- debug log
if debug then print(" [!] no idea; moving on to next [".. pos + 1 .."]") end
@@ -218,6 +235,8 @@ function convertToHeonian(text)
end
-- output
if autocopy then CopyToClipboard(string.sub(transcribed_text,0,-1)) end
if transcribed_text == nil then transcribed_text = "" end
return transcribed_text
end
end
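
For orientation, a minimal sketch of how the two new pieces above fit together (assuming xclip is installed, since CopyToClipboard shells out to it, and that the words used exist in the lexicon):

local transcribed = convertToHeonian("ka.o mi.wa")  -- transcribe "this cat" into Heonian script
if transcribed ~= "" then
    CopyToClipboard(transcribed)  -- writes a temp file and pipes it through xclip
end

This mirrors what the -c / --copy flag does inside convertToHeonian itself.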


@@ -1,4 +1,4 @@
#USAGE
# USAGE
* all
- shows all words in heonian
@@ -7,13 +7,13 @@
* transcript \<word\>
- transcripts the word to [heonian script](https://cronut.cafe/~lustlion/myrheon/test/)
#INSTALL
# INSTALL
##Linux
## Linux
* 1. Run install.sh
- Preferably as root; if you run as non-root, your install will go to your local user bin.
* 2. Now you can run "heonian" or "lexicon" to use this app
##Windows
## Windows
* There is no windows installer yet
- Just run "lexicon" here.
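
A quick smoke test of a fresh checkout is sketched below. It assumes the repository root is the working directory, that the heonian-content submodule has been pulled (git submodule update --init), and that the lexicon script is executable:

-- drive the CLI through os.execute; "help" and "tr" are commands handled by the lexicon script below
os.execute("./lexicon help")          -- prints the command list
os.execute("./lexicon tr nyan.pash")  -- transcribes "hello!" into Heonian script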

46  add_word.lua Normal file

@@ -0,0 +1,46 @@
function addWord()
number = tonumber(number)
-- open file
local file = io.open(data_get, "r")
local pos = 0
-- all this is old code, not useful now, but still in case i need it <:
-- get insertion point
pos = file:seek("end")
-- store after
local after = file:read("*all")
-- store before
file:seek("set")
local before = file:read(pos-1)
-- done reading
file:close()
modify = {}
print("insert new phonetics:")
modify[1] = io.read()
print("insert new meaning:")
modify[2] = io.read()
print("insert new type:")
modify[3] = io.read()
print("insert new canon origin:")
modify[4] = io.read()
print("insert new meta origin:")
modify[5] = io.read()
print("insert new notes:")
modify[6] = io.read()
local file = io.open(data_output, "w+")
file:write(before)
file:write("\n")
for i=1, #modify do
if i > 1 then
file:write(" ")
end
file:write(modify[i])
end
file:write("")
file:write(after)
file:close()
end
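
Note that addWord reads the data_get and data_output globals instead of taking a path argument; a minimal sketch of the setup the lexicon script performs before calling it (paths as in the lexicon diff further down):

data_get = "heonian-content/words"   -- file the word list is read from
data_output = data_get               -- file the rewritten list is written back to
addWord()                            -- prompts for phonetics, meaning, type, origins and notes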


@@ -25,10 +25,34 @@ COLOR = Enum {
"White"
}
EFFECT = Enum {
"Normal",
"Bold",
"Dim",
"Italic",
"Underline",
"BlinkSlow",
"BlinkFast",
"Invert",
"Conceal",
"CrossedOut"
}
function makeTextEffect(Effect, Text)
Text = Text or ""
return "\027["..tostring(Effect-1).."m"..Text.."\027[0;m"
end
function colorText(Color, Text)
return "\027[38;5;"..tostring(Color-1).."m"..Text.."\027[0;m"
Text = Text or ""
return "\027[38;5;"..tostring(Color-1).."m"..Text.."\027[0;m"
end
function colorTextBackground(Color, Text)
return "\027[48;5;"..tostring(Color-1).."m"..Text.."\027[0;m"
end
Text = Text or ""
return "\027[48;5;"..tostring(Color-1).."m"..Text.."\027[0;m"
end
function scrollTerminalUp(amount)
return "\027["..amount.."T"
end
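
A small sketch of the colour helpers added here, assuming a 256-colour terminal; the COLOR and EFFECT names are entries of the enums defined above:

print(colorText(COLOR.HighBlue, "a.ra"))           -- blue foreground
print(colorTextBackground(COLOR.Black, "energy"))  -- black background
print(makeTextEffect(EFFECT.Bold, "heonian"))      -- bold text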

143  edit_word.lua Normal file

@@ -0,0 +1,143 @@
function editWord(number)
number = tonumber(number)
-- determine modifications
local modify_all = true
local reenable_modify_all = false
local mph = false
local mm = false
local mt = false
local mco = false
local mmo = false
local mn = false
for _, v in pairs(arg) do
if v == "--modify-phonetics"
or v == "-mph" then
modify_all = false
mph = true
end
if v == "--modify-meaning"
or v == "-mm" then
modify_all = false
mm = true
end
if v == "--modify-type"
or v == "-mt" then
modify_all = false
mt = true
end
if v == "--modify-canon-origin"
or v == "-mco" then
modify_all = false
mco = true
end
if v == "--modify-meta-origin"
or v == "-mmo" then
modify_all = false
mmo = true
end
if v == "--modify-notes"
or v == "-mn" then
modify_all = false
mn = true
end
if v == "--modify-all"
or v == "-ma" then
reenable_modify_all = true
end
end
if reenable_modify_all then
modify_all = true
end
-- open file
local file = io.open(data_get, "r")
local text = file:read("*all")
local pos = 0
-- move to line
for i=1, number do
local npos = string.find(text,"\n",pos+1)
if npos == nil then break end
pos = npos
end
file:seek("set",0)
local before = file:read(pos)
file:seek("set",pos)
-- move to things to modify
pos = string.find(text,"{\"",pos)+1
file:seek("set",pos)
modify = {}
for i=1, 6 do
-- find next "
length = string.find(text,"\"",pos+1)-pos
table.insert(modify,file:read(length-1))
if i < 6 then
pos = string.find(text,"\"",pos+length+1)
end
file:seek("set",pos)
end
file:seek("set",string.find(text,"})",pos+length+1)+1)
local after = file:read("*all") or ""
file:close()
print("FILE:")
print(" phonetics",modify[1])
print(" meaning",modify[2])
print(" type",modify[3])
print(" canon origin",modify[4])
print(" meta origin",modify[5])
print(" notes",modify[6])
print("")
if modify_all or mph then
print("insert new phonetics:")
modify[1] = io.read()
end
if modify_all or mm then
print("insert new meaning:")
modify[2] = io.read()
end
if modify_all or mt then
print("insert new type:")
modify[3] = io.read()
end
if modify_all or mco then
print("insert new canon origin:")
modify[4] = io.read()
end
if modify_all or mmo then
print("insert new meta origin:")
modify[5] = io.read()
end
if modify_all or mn then
print("insert new notes:")
modify[6] = io.read()
end
print("NEW:")
print(" phonetics",modify[1])
print(" meaning",modify[2])
print(" type",modify[3])
print(" canon origin",modify[4])
print(" meta origin",modify[5])
print(" notes",modify[6])
--rewrite with new info
local file = io.open(data_output, "w+")
file:write(before)
file:write("\n")
for i=1, #modify do
if i > 1 then
file:write(" ")
end
file:write(modify[i])
end
file:write("")
file:write(after)
file:close()
end
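
Like addWord, editWord relies on the data_get/data_output globals and on the flag scan over arg; a hedged sketch of editing only the meaning field of word #12 (the id is just an example):

data_get = "heonian-content/words"
data_output = data_get
arg = { "e", "12", "-mm" }  -- the arguments as lexicon would pass them; -mm limits the edit to the meaning field
editWord(12)                -- prints the current fields, then prompts only for a new meaning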

49  get_words.lua Normal file

@@ -0,0 +1,49 @@
function getWords(path)
local file = io.open(path,"r")
local text = file:read("*all")
file:close()
local t = {} -- this table will hold our words!!! words format is {word, translation, types, canon origin, meta origin, notes} last three are optional.
local p = 0 -- current position, nil if at end of file
while p do
p = p + 1
local new_word_data = ""
-- this isnt the whole word yet, but all the data around it separated with tabs, so lets get what we want.
-- careful if its uh end of file tho!
local np = string.find(text,"\n",p) -- np is the next word. this is so we can slice it
if np then
new_word_data = string.sub(text, p, np-1)
else
new_word_data = string.sub(text, p)
end
if new_word_data ~= "" then
local new_word = {} -- we'll hold it all here once spliced
local wp = 0 -- word data position!
while wp do -- nil if at end of string so.
wp = wp + 1 -- lets move past the tab we just found
local wnp = string.find(new_word_data, " ",wp)
local stuff = ""
-- we now splice the word data at every tab and add each piece to the new word table
if wnp then
stuff = string.sub(new_word_data, wp, wnp-1) or stuff
else
stuff = string.sub(new_word_data,wp) or stuff
end
table.insert(new_word,stuff)
wp = wnp
end
-- now we add the word to the word table
table.insert(t, new_word)
end
-- and move on the next
p = np
end
-- now we just. return the table.
return t
end
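
Put differently, every line of the words file becomes one table of six strings; a minimal sketch of what a caller gets back (the entry shown is taken from the old words.lua, field order as in the comment above):

words = getWords("heonian-content/words")
-- each entry looks roughly like {"a.ra", "energy", "noun", "", "", ""},
-- i.e. {word, translation, type, canon origin, meta origin, notes}
print(#words)                    -- number of entries
print(words[1][1], words[1][2])  -- word and translation of the first entry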

1  heonian-content Submodule

Submodule heonian-content added at 86163910b2

36  lessons.lua Normal file

@@ -0,0 +1,36 @@
-- Source https://stackoverflow.com/a/11130774
function scandir(directory)
local directory = directory or ""
local i, t, popen = 0, {}, io.popen
local pfile = popen('ls "'..directory..'"')
for filename in pfile:lines() do
i = i + 1
t[i] = filename
end
pfile:close()
return t
end
function getFormattedLessonNames(directory)
local t = scandir(directory)
for i=1, #t do
t[i] = colorTextBackground(COLOR.Black,"\n#" .. i .. " " .. colorText(COLOR.HighBlue,string.gsub(t[i],".lua","")))
end
return t
end
function lesson(number)
number = tonumber(number)
local t = scandir(dir.."/heonian-content/lessons/")
if t[number] then
dofile("heonian-content/lessons/"..t[number])
else
print("lesson not found")
end
end
function lessonPause()
print("\nInput anything to continue")
io.read()
print(scrollTerminalUp(4))
end
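
A short sketch of driving the lesson helpers, assuming the submodule's lessons directory is present and dir is set the way the lexicon script sets it:

dir = os.getenv("PWD") or io.popen("cd"):read()  -- same working-directory lookup as lexicon
print(table.unpack(getFormattedLessonNames(dir.."/heonian-content/lessons/")))  -- numbered lesson list
lesson(1)  -- dofile()s the first lesson script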

220  lexicon

@@ -1,29 +1,219 @@
#!/usr/bin/env lua
dir = os.getenv("PWD") or io.popen("cd"):read()
if not arg[1] then print("no argument, try again") return end
require "R2H2"
require "color"
adjustTableSizes = require "output_sizes"
printOutputTable = require "output_word"
showList = require "show_list"
searchList = require "search_list"
require "output_sizes"
require "output_word"
words = dofile("words.lua")
require "show_list"
require "search_list"
require "remove_word"
require "add_word"
require "edit_word"
require "get_words"
require "lessons"
if arg[1] == "all" then
showList()
return
data_get = "heonian-content/words"
data_output = "heonian-content/words"
words = getWords(data_get)
for _, v in pairs(arg) do
if v == "-h"
or v == "--help"
or v == "--usage"
or v == "--how"
then
please_help = true
end
end
if arg[1] == "search" then
if arg[2] then searchList(arg[2])
else print("insert query") end
if arg[1] == "h"
or arg[1] == "help"
or arg[1] == "how"
or arg[1] == "howdo" then
print([[
[a]ll
- shows all words in heonian.
[e]dit <word_id>
- edits a word in the lexicon
[h]elp
- shows this message
[l]esson
- shows heonian lessons.
[n]ew
- add new word to the data
[r]emove <word_id>
- removes the word from the lexicon
[s]earch <query>
- searches all words and shows only those that match string.find()
[tr]anscript <word>
- transcripts the word to heonian script
General parameters:
-h / --help (shows what the command does)
]])
return
end
if arg[1] == "transcript" then
if arg[2] then print(convertToHeonian(arg[2]))
else print("no string found") end
if arg[1] == "a"
or arg[1] == "all" then
if please_help then
print([[
[a]ll
- shows all words in heonian.
Parameters:
-h / --help (shows what the command does)
-a / --all (shows more info on the word, if any)
]])
else
showList()
end
return
end
end
if arg[1] == "s"
or arg[1] == "search" then
if please_help then
print([[
[s]earch <query>
- searches all words and shows only those that match string.find()
Parameters:
-h / --help (shows what the command does)
-a / --all (shows more info on the word, if any)
-e / --english (limits the search to the translated words)
-r / --raw (limits the search to the heonian words)
-i / --id (searches using word id)
]])
else
if arg[2] then searchList(arg[2])
else print("no query to search") end
end
return
end
if arg[1] == "l"
or arg[1] == "lesson" then
if please_help then
print([[
[l]esson <lesson>
- shows heonian lessons. if no lesson is specified, lists all the lessons.
Parameters:
-h / --help (shows what the command does)
]])
else
if arg[2] then lesson(arg[2])
else
print("specify a lesson",table.unpack(getFormattedLessonNames(dir.."/heonian-content/lessons/")))
end
end
return
end
if arg[1] == "tr"
or arg[1] == "transcript" then
if please_help then
print([[
[tr]anscript <word>
- transcripts the word to heonian script
Parameters:
-h / --help (shows what the command does)
-v / --verbose (shows the transcription process)
-c / --copy (copies the transcription to the clipboard)
]])
else
if arg[2] then print(convertToHeonian(arg[2]))
else print("no string to transcript") end
return
end
end
if arg[1] == "r"
or arg[1] == "remove" then
if please_help then
print([[
[r]emove <word_id>
- removes the word from the lexicon
Parameters:
-h / --help (shows what the command does)
]])
else
if arg[2]
and tonumber(arg[2]) <= #words
and tonumber(arg[2]) > 0 then
removeWord(arg[2])
else print("no word to remove") end
return
end
end
if arg[1] == "n"
or arg[1] == "new" then
if please_help then
print([[
[n]ew
- add new word to the data
Parameters:
-h / --help (shows what the command does)
]])
else
addWord()
end
end
if arg[1] == "e"
or arg[1] == "edit" then
if please_help then
print([[
[e]dit <word_id>
- edits a word in the lexicon
Parameters:
-h / --help / (shows what the command does)
-mph / --modify-phonetics / edit the phonetics field
-mm / --modify-meaning / edit the meaning field
-mt / --modify-type / edit the type field
-mco / --modify-canon-origin / edit the canon origin field
-mmo / --modify-meta-origin / edit the meta origin field
-mn / --modify-notes / edit the notes field
-ma / --modify-all / edit all the fields
(-ma is enabled by default and disabled if any other field flag is given)
]])
else
if arg[2]
and tonumber(arg[2]) <= #words
and tonumber(arg[2]) > 0 then
editWord(arg[2])
else print("no word to edit") end
return
end
end


@@ -1,20 +1,20 @@
return function(tbl)
function adjustTableSizes(tbl)
local words_max_length = {1,1,1,1,1,1}
for i=1, #tbl do
local this_word = tbl[i][2]
for j=1, #this_word-1 do
local length = string.len(this_word[j])
if length + 1 > words_max_length[j+1] then
local length = string.len(this_word[j])
if length + 1 > words_max_length[j+1] then
words_max_length[j+1] = length + 1
end
end
local length = string.len(convertToHeonian(this_word[1]))
local length = string.len(this_word[1])
if length + 1 > words_max_length[1] then
words_max_length[1] = length + 1
end
end
for i=1, #words do
for j=1, #words[i]-1 do
if j > 3 then break end
@@ -27,4 +27,4 @@ return function(tbl)
end
end
return tbl
end
end


@@ -1,9 +1,14 @@
return function(tbl)
function printOutputTable(tbl,all)
local show_all = false
for _, v in pairs(arg) do
for _, v in pairs(arg) do
if v == "-a" or v == "--all" then show_all = true end
end
end
if all then
show_all = true
end
for i=1, #tbl do
local this_number = tbl[i][1]
local this_word = tbl[i][2]
@@ -17,22 +22,31 @@ return function(tbl)
text = text .. colorTextBackground(COLOR.Black,colorText(COLOR.HighBlue,this_word[1]))
text = text .. colorTextBackground(COLOR.Black,colorText(COLOR.Gray,this_word[3]))
text = text .. colorTextBackground(COLOR.Black,colorText(COLOR.Green,this_word[2]))
print(text)
if show_all then
text = ""
if this_word[4] ~= "" then
text = text .. colorText(COLOR.HighCyan,"\tCanon origin: ")
text = text .. colorText(COLOR.HighCyan,this_word[4])
print(text)
if this_word[4] ~= ""
or this_word[5] ~= ""
or this_word[6] ~= "" then
text = ""
if this_word[4] ~= "" then
text = text .. colorText(COLOR.HighCyan,"\tCanon origin: ")
text = text .. colorText(COLOR.HighCyan,this_word[4])
print(text)
end
text = ""
if this_word[5] ~= "" then
text = text .. colorText(COLOR.HighPurple,"\tMeta origin: ")
text = text .. colorText(COLOR.HighPurple,this_word[5])
print(text)
end
text = ""
if this_word[6] ~= "" then
text = text .. colorText(COLOR.HighYellow,"\tNotes: ")
text = text .. colorText(COLOR.HighYellow,this_word[6])
print(text)
end
end
text = ""
if this_word[5] ~= "" then
text = text .. colorText(COLOR.HighPurple,"\tMeta origin: ")
text = text .. colorText(COLOR.HighPurple,this_word[5])
print(text)
end
print("")
end
end
end

76  remove_word.lua Normal file

@@ -0,0 +1,76 @@
function removeWord(number)
number = tonumber(number)
local file = io.open(data_get, "r")
local text = file:read("*all")
local pos = 0
-- move to line
for i=1, number do
local npos = string.find(text,"\n",pos+1)
if npos == nil then
print("last line")
break
end
pos = npos
end
-- get before
file:seek("set",0)
local before = file:read(pos-1)
file:seek("set",pos)
-- move to things to modify
pos = string.find(text,"\n",pos+1)
file:seek("set",pos)
-- find next section
local next_section = string.find(text,"\"",pos+1) or file:seek("end")
file:seek("set",pos)
local length = next_section-pos
-- get after
file:seek("set",pos)
local after = file:read("*all") or ""
-- done w file
file:close()
-- get word going to be removed
local results_table = {}
if words[number] then
table.insert(results_table,{number,words[number]})
end
results_table = adjustTableSizes(results_table)
print("\nare you sure you want to remove the following entry? (y/N)")
printOutputTable(results_table,true)
print("")
local delete = string.lower(io.read())
if delete == "y"
or delete == "ye"
or delete == "yes"
then
delete = true
else
delete = false
end
--[[
print("Before")
print(text)
print("After:")
print(before)
print(after)
]]
if delete then
--rewrite with new info
local file = io.open(data_output, "w+")
file:write(before)
file:write("\n")
file:write(after)
file:close()
print("\nentry was deleted.\n")
else
print("\nentry was not deleted.\n")
end
end


@@ -1,11 +1,46 @@
return function(query)
function searchList(query)
results_table = {}
local skip_regular = false
for _, v in pairs(arg) do
if v == "-i" or v == "--id" then
skip_regular = true
local id = tonumber(query)
if words[id] then
local word = {id,words[id]}
table.insert(results_table,word)
end
break
end
end
for i=1, #words do
if skip_regular then break end
local exit = true
for j=1, #words[i]-1 do
if string.find(words[i][j], query) then
exit = false
break
local all = true
for _, v in pairs(arg) do
if v == "-r" or v == "--raw" then
all = false
word = string.gsub(words[i][1],"%p","")
if string.find(word, query) then
exit = false
break
end
end
if v == "-e" or v == "--english" then
all = false
word = string.gsub(words[i][2],"%p","")
if string.find(word, query) then
exit = false
break
end
end
end
if all == true then
for j=1, #words[i] do
word = string.gsub(words[i][j],"%p","")
if string.find(word, query) then
exit = false
break
end
end
end
if not exit then
@@ -17,6 +52,10 @@ return function(query)
results_table = adjustTableSizes(results_table)
printOutputTable(results_table)
else
print("no words found for <" .. query .. ">")
if skip_regular then
print("no word found for id #" .. query)
else
print("no words found for query <" .. query .. ">")
end
end
end
end


@@ -1,4 +1,4 @@
return function()
function showList()
results_table = {}
for i=1, #words do
local word = {i,words[i]}
@@ -6,4 +6,4 @@ return function()
end
results_table = adjustTableSizes(results_table)
printOutputTable(results_table)
end
end

292  words.lua

@@ -1,292 +0,0 @@
local t = {}
table.insert(t,{"-el","bringer of","suffix","demonic","ariel","used in demonic names."})
table.insert(t,{"-laeso","feed from","suffix","demonic","",""})
table.insert(t,{"-sh","intention of","suffix","umyr intention/power","","used in myrean names."})
table.insert(t,{"a.bae","young","modifier","","",""})
table.insert(t,{"a.e mo?","isn't it right","expression","","",""})
table.insert(t,{"a.ga.ka","at, towards, for","marker","","","also used like"})
table.insert(t,{"a.pat.ku","to lie (on a surface)","verb","","",""})
table.insert(t,{"a.ra","energy","noun","","",""})
table.insert(t,{"a.ra.un","construct","noun","energy + being","",""})
table.insert(t,{"a.ri","willpower, motivation, desire","noun","demonic","ariel",""})
table.insert(t,{"a.ri.lae","demon","noun","","",""})
table.insert(t,{"a.wa.te","short","modifier","","",""})
table.insert(t,{"aa","correct","modifier","","",""})
table.insert(t,{"an","path, step","noun","","fern's name",""})
table.insert(t,{"an.ko","thank you","expression","","@gamer@bungle.online",""})
table.insert(t,{"an.ku","to walk, to experience","verb","","fern's name",""})
table.insert(t,{"an.me","anime","noun","","",""})
table.insert(t,{"ba","you","pronoun","","",""})
table.insert(t,{"ba.bii.ku","to count","verb","","",""})
table.insert(t,{"ba.ku","to swim","verb","","",""})
table.insert(t,{"ba.lo","hand","noun","","",""})
table.insert(t,{"ba.pa.ba.pa.ku","to rub, to hug","verb","","",""})
table.insert(t,{"ba.pa.ku","to hold","verb","","",""})
table.insert(t,{"bae","small","modifier","","",""})
table.insert(t,{"bae.la.pa","egg","noun","","",""})
table.insert(t,{"bae.ma.ra","yesterday","noun","","",""})
table.insert(t,{"bar.an","grass","noun","","",""})
table.insert(t,{"be.nyui","bunny","noun","","",""})
table.insert(t,{"be.taf","big","modifier","","",""})
table.insert(t,{"beg","many, big quantity","modifier","","",""})
table.insert(t,{"bi.men","bird","noun","","",""})
table.insert(t,{"bo.ku","to sew","verb","","",""})
table.insert(t,{"bokuch","clothes, anything sewn","noun","","",""})
table.insert(t,{"bu.lu.ku","to kill","verb","","",""})
table.insert(t,{"cha.we","narrow","modifier","","",""})
table.insert(t,{"chib","tongue","noun","","",""})
table.insert(t,{"chin","nose","noun","","",""})
table.insert(t,{"chind","round","modifier","","",""})
table.insert(t,{"cho.ku","to swell","verb","","",""})
table.insert(t,{"chu","denotes the subject","marker","","",""})
table.insert(t,{"chu.ku","to write","verb","","",""})
table.insert(t,{"d.shen.ish","god of change","noun","umyr great + movement + power, d + shen + ish","from old personal minecraft lore",""})
table.insert(t,{"da.ka","dragon","noun","","sounds cool enough",""})
table.insert(t,{"da.ka.fe.lish","cat","noun","dragon companion","@lunadragofelis@embracing.space","casual way"})
table.insert(t,{"da.ka.un","dragonborn","noun","","",""})
table.insert(t,{"da.ti","dirty","modifier","","",""})
table.insert(t,{"de.da.la","cold","noun","","",""})
table.insert(t,{"dee","straight","modifier","","",""})
table.insert(t,{"di.ra.fow","deerfox","noun","","",""})
table.insert(t,{"di.rash","fox","noun","","",""})
table.insert(t,{"dia.ku","to live","verb","","",""})
table.insert(t,{"din.di","in, inside","marker","","",""})
table.insert(t,{"do.me","dust","noun","","",""})
table.insert(t,{"dra","indicates question","marker","","",""})
table.insert(t,{"dsho.ei","fate","noun","","fern's name",""})
table.insert(t,{"dsho.ei.an","shared destiny, together","modifier","fate + path","fern's name","walking the same path -> sharing the same fate -> together"})
table.insert(t,{"dsho.ei.an.un","romantic partner, (fated)","noun","fate + path + being","fern's name",""})
table.insert(t,{"du.tan","smoke","noun","","",""})
table.insert(t,{"duch","seed","noun","","",""})
table.insert(t,{"e","which","pronoun","","",""})
table.insert(t,{"e.ma","who","pronoun","","",""})
table.insert(t,{"e.mat","where","pronoun","","",""})
table.insert(t,{"e.nya","when","pronoun","","",""})
table.insert(t,{"e.o","how","pronoun","","",""})
table.insert(t,{"e.wa.la","root","noun","","",""})
table.insert(t,{"e.yu","what","pronoun","","",""})
table.insert(t,{"ee","new, yes","modifier, expression","","",""})
table.insert(t,{"el.fu","animal","noun","","",""})
table.insert(t,{"en.te","duck","noun","","",""})
table.insert(t,{"ep.shi","leaf","noun","","",""})
table.insert(t,{"esh.dre","moon, secret","noun","demonic","esdreel",""})
table.insert(t,{"fa","not","modifier","","",""})
table.insert(t,{"fe.la","dog","noun","heon companion, fe.li","",""})
table.insert(t,{"fe.li","companion","noun","","from dakafelish, cat",""})
table.insert(t,{"fe.rab","forest","noun","","",""})
table.insert(t,{"fern","river","noun","","fern's name",""})
table.insert(t,{"fi.lo","flower","noun","","",""})
table.insert(t,{"fich","fish","noun","","from fish",""})
table.insert(t,{"fin.a","finger","noun","","",""})
table.insert(t,{"fiu.ku","to blow","verb","","",""})
table.insert(t,{"fo.ri","hair","noun","","",""})
table.insert(t,{"fow","deer","noun","","from fox in deerfox",""})
table.insert(t,{"fu.fo","belly","noun","","",""})
table.insert(t,{"fu.ra","full, whole","modifier","","",""})
table.insert(t,{"ga.da.la","warm","modifier","","",""})
table.insert(t,{"ga.la","far","modifier","","",""})
table.insert(t,{"ga.le","stick","noun","","",""})
table.insert(t,{"ga.mat","there","pronoun","","",""})
table.insert(t,{"ga.o","that","pronoun","","",""})
table.insert(t,{"gan.ku","to go there","verb","","",""})
table.insert(t,{"ge.shi.ku","to sit","verb","","",""})
table.insert(t,{"grak.ma.dash","god of justice","noun","umyr fire + justice + power, grak + mada + ish","from old personal minecraft lore","grakmadaz is the deity that makes justice upon the world."})
table.insert(t,{"gu.gu","ear","noun","","",""})
table.insert(t,{"gu.gun.ku","to hear","verb","","",""})
table.insert(t,{"gu.me","sand","noun","","",""})
table.insert(t,{"gu.ra.ki.ku","to burn","verb","","",""})
table.insert(t,{"gu.ra.ton","green","modifier","","",""})
table.insert(t,{"gu.rak","fire","noun","","",""})
table.insert(t,{"ha.mi","heavy","modifier","","",""})
table.insert(t,{"hansh","failure","noun","","hans dont be mean to yourself :c",""})
table.insert(t,{"heag.ku","to vomit","verb","","",""})
table.insert(t,{"ho.ku","to freeze","verb","","",""})
table.insert(t,{"ho.wi.to","ice","noun","","",""})
table.insert(t,{"hu.ma","few, small quantity","modifier","","",""})
table.insert(t,{"hun","left","noun, modifier","","",""})
table.insert(t,{"i.fa","fat","noun","","",""})
table.insert(t,{"i.ke","bad","modifier","","",""})
table.insert(t,{"i.ma","he, she, they, it","pronoun","","",""})
table.insert(t,{"i.pa","road","noun","","",""})
table.insert(t,{"i.pa.li","feminity","noun","","","not quite feminity as we understand it, but a more distanced and stranger interpretation of it. "})
table.insert(t,{"i.shi","beauty","noun","","@sugarbell aka isi",""})
table.insert(t,{"i.shi.ton","pink","noun","heonian beauty + color, ishi + ton","","pink is pretty."})
table.insert(t,{"i.shu.ku","to float","verb","","",""})
table.insert(t,{"ier","hope","noun","","",""})
table.insert(t,{"k.yu","with <someone>","marker","","",""})
table.insert(t,{"ka.la","near","modifier","","",""})
table.insert(t,{"ka.mat","here","pronoun","","",""})
table.insert(t,{"ka.o","this","pronoun","","",""})
table.insert(t,{"ka.we","thin","modifier","","",""})
table.insert(t,{"kan.ku","to come here","verb","","",""})
table.insert(t,{"kash.ku","to break","verb","","",""})
table.insert(t,{"ki.ku","to say, to chat","verb","","",""})
table.insert(t,{"ku","to be","verb","","","basic verbal form."})
table.insert(t,{"ku.ku.ku","to embody","noun","","",""})
table.insert(t,{"kya.ny","long","modifier","","",""})
table.insert(t,{"la.e","spouse","noun","","",""})
table.insert(t,{"la.fen","feather","noun","","",""})
table.insert(t,{"li.ku","to like","verb","umyr love + verb form, li","",""})
table.insert(t,{"li.pa","liver","noun","","",""})
table.insert(t,{"lu.fu","guts","noun","","",""})
table.insert(t,{"lu.ku","to die","verb","","",""})
table.insert(t,{"lu.nya","dawn, dusk","noun","","from dusk, dawn , (@lunadragofelis@embracing.space)",""})
table.insert(t,{"lu.nya.ton","orange","noun","","","color of dusk/dawn"})
table.insert(t,{"ma","concenciousness","noun","","",""})
table.insert(t,{"ma.chun","examination","noun","","",""})
table.insert(t,{"ma.ra","day","noun","","",""})
table.insert(t,{"ma.ya.ri","smooth","modifier","","",""})
table.insert(t,{"maa.me","if","marker","","",""})
table.insert(t,{"me.ku","to dig","verb","","",""})
table.insert(t,{"me.lu","blood","noun","","",""})
table.insert(t,{"me.lu","blood","noun","","",""})
table.insert(t,{"me.lu.lae","louse","noun","","","blood sucker. also derogatory."})
table.insert(t,{"me.lu.ton","red","noun","","","color of blood"})
table.insert(t,{"me.ta","tail","noun","","",""})
table.insert(t,{"mei","snow, safety","noun","heonian","one of my names , i want it to be something pretty, also one of fern's name (safety)",""})
table.insert(t,{"mi","time","noun","","",""})
table.insert(t,{"mi.gu.ra","daytime","noun","","",""})
table.insert(t,{"mi.ku","to put into words","verb","","bincat",""})
table.insert(t,{"mi.la.ta","sky","noun","","",""})
table.insert(t,{"mi.me","past","noun","","",""})
table.insert(t,{"mi.min.ku","to see","veb","","",""})
table.insert(t,{"mi.nya","present","noun","","",""})
table.insert(t,{"mi.ra.ku","to fear","verb","","",""})
table.insert(t,{"mi.sha.ku","to fight","verb","","",""})
table.insert(t,{"mi.shi","nighttime","noun","","",""})
table.insert(t,{"mi.wa","cat","noun","sounds cats make","",""})
table.insert(t,{"mi.we","future","noun","","",""})
table.insert(t,{"mimi","eye","noun","","",""})
table.insert(t,{"mir.he.on","myrheon (country)","noun","umyr myr + heonian heon, mir + heon","from poem",""})
table.insert(t,{"mu.ku","to kiss","verb","","",""})
table.insert(t,{"mya","preceeds subordinate clause","marker","","","marker after chu / subject yu, before subordinate clause"})
table.insert(t,{"nao.mi","dream","noun","","",""})
table.insert(t,{"nao.mi.min.ku","to dream","verb","","",""})
table.insert(t,{"nya","hi (informal)","expression","from nyan.pash hello","",""})
table.insert(t,{"nya.ba.ku","to tie","verb","","",""})
table.insert(t,{"nya.ku","to ask for","verb","","",""})
table.insert(t,{"nya.wa","rope","noun","","",""})
table.insert(t,{"nyan.pash","hello!, good morning!","expression","","from nyanpasu",""})
table.insert(t,{"nyu.sha.ku","to suck","verb","","",""})
table.insert(t,{"o.li.dia","goddess of life","noun","umyr care + love + life , o + li + dia","from old personal minecraft lore","olidia is the deity who takes care upon all living creatures."})
table.insert(t,{"pa.ku","to stand","verb","","",""})
table.insert(t,{"pa.lo","foot","noun","","",""})
table.insert(t,{"pa.re","knowledge","noun","","",""})
table.insert(t,{"pa.re.ku","to know, to learn","verb","","",""})
table.insert(t,{"pa.ru","stupid","modifier","","",""})
table.insert(t,{"par.un","human","noun","","",""})
table.insert(t,{"paronya","understanding, agreement","noun","","",""})
table.insert(t,{"pat","back","noun","","",""})
table.insert(t,{"pat.ku","to spit","verb","","",""})
table.insert(t,{"pau.me","meat","noun","","",""})
table.insert(t,{"pe.fin.a","finernail","noun","","",""})
table.insert(t,{"pe.ma","wide","modifier","","",""})
table.insert(t,{"pe.ma.mat","mountain","noun","","",""})
table.insert(t,{"pe.ri","claws","noun","","",""})
table.insert(t,{"pe.ri.ku","to scratch","verb","","",""})
table.insert(t,{"pe.wa.mat","sea","noun","","",""})
table.insert(t,{"pee.ka.ku","to think, to become aware","verb","","",""})
table.insert(t,{"pee.ta.ku","to pull, to remove (something)","verb","","",""})
table.insert(t,{"pen.ya","dog","noun","","",""})
table.insert(t,{"pew.ku","to turn","verb","","",""})
table.insert(t,{"pi.ke.she","rotten","modifier","","",""})
table.insert(t,{"pi.she.ku","to drink","verb","","",""})
table.insert(t,{"po.me","stone","noun","","",""})
table.insert(t,{"po.nya","good","modifier","","",""})
table.insert(t,{"pon.me","because","marker","","",""})
table.insert(t,{"pu","star","noun","heonian","actually puropu was first",""})
table.insert(t,{"pu.ro.pu","sol, sun","noun","heonian pu + ro + pu, star among stars","",""})
table.insert(t,{"pu.ro.ton","yellow","modifier","","",""})
table.insert(t,{"ra.ya","parent","noun","","",""})
table.insert(t,{"re","i","pronoun","","",""})
table.insert(t,{"ri","sharp","modifier","","",""})
table.insert(t,{"ri.la","horns","noun","demonic","",""})
table.insert(t,{"rin","right","noun, modifier","","",""})
table.insert(t,{"ro","among","marker","","",""})
table.insert(t,{"rop.sha","dull","modifier","","",""})
table.insert(t,{"sha","change","noun","demonic","",""})
table.insert(t,{"sha.lat","salt, salty","noun, modifier","","",""})
table.insert(t,{"sha.pan","other","pronoun","","",""})
table.insert(t,{"shaf.ra","elegant","modifier","","",""})
table.insert(t,{"shai.ku","to add (math)","verb","","",""})
table.insert(t,{"shai.rup","math","noun","","",""})
table.insert(t,{"shash.ku","to split, to cut, to divide","verb","","",""})
table.insert(t,{"she.ka","mouth","noun","","",""})
table.insert(t,{"she.ku","to eat","verb","","",""})
table.insert(t,{"she.ma","louse","noun","","",""})
table.insert(t,{"she.pa","fruit","noun","","",""})
table.insert(t,{"sheb","head","noun","","",""})
table.insert(t,{"shen","direction","noun","umyr movement, shen","zenish was first, zen -> dshen -> shen",""})
table.insert(t,{"shen.i.sha","west","noun","heon zenish","",""})
table.insert(t,{"shen.i.sha.pe.ra","northwest","noun","heon west + north, shen.i.sha + shen.pe.ra","",""})
table.insert(t,{"shen.i.sha.ron","southwest","noun","heon west + south, shen.i.sha + shen.o.ron","",""})
table.insert(t,{"shen.lu.mo.ro","southeast","noun","heon east + south, shen.i.lun.a + shen.o.ron","",""})
table.insert(t,{"shen.lu.nya","east","noun","heon dusk, dawn","",""})
table.insert(t,{"shen.lun.pe.ra","northeast","noun","heon east + north, shen.lun.a + shen.pe.ra","",""})
table.insert(t,{"shen.o.ron","south","noun","heonian","",""})
table.insert(t,{"shen.pe.ra","north","noun","heonian","",""})
table.insert(t,{"shi","leg","noun","","",""})
table.insert(t,{"shi.de.ton","black","modifier","","",""})
table.insert(t,{"shi.fu.mat","snake","noun","","",""})
table.insert(t,{"shi.nya","breast","noun","","",""})
table.insert(t,{"shi.ra.ku","to have pending","verb","","",""})
table.insert(t,{"shin.da","wind","noun","","",""})
table.insert(t,{"shin.li","masculitnity","noun","","","not quite masculinity as we understand it, but a more distanced and stranger interpretation of it. "})
table.insert(t,{"shon.ku","to smell","verb","","",""})
table.insert(t,{"shoo","essence, core, heart","noun","","",""})
table.insert(t,{"shoo.ma","spirit","noun","","",""})
table.insert(t,{"shoo.me.ku","to bury","verb","","",""})
table.insert(t,{"ta.fa.ku","to wipe","verb","","",""})
table.insert(t,{"ta.mi.nya","innocence, honesty","noun, modifier","","fern's name","in a way of pure"})
table.insert(t,{"ta.te.ti","heart","noun","","",""})
table.insert(t,{"te.a.ga.ku","to throw","verb","","",""})
table.insert(t,{"te.ba.ku","to squeeze, to hug","verb","","",""})
table.insert(t,{"te.cha","skin","noun","","",""})
table.insert(t,{"te.ga.me","story","noun","","",""})
table.insert(t,{"te.ku","to hurt, to hit","verb","","",""})
table.insert(t,{"te.ma","neck","noun","","",""})
table.insert(t,{"te.me.lu.ku","to stab and make bleed","verb","","",""})
table.insert(t,{"te.yo","knee","noun","","",""})
table.insert(t,{"tee.ku","to push, to place (something)","verb","","",""})
table.insert(t,{"tek.da","ash","noun","","",""})
table.insert(t,{"ti.pa.sha","tree","noun","","",""})
table.insert(t,{"ti.pash","bark","noun","","",""})
table.insert(t,{"ti.she.ku","to bite","verb","","",""})
table.insert(t,{"ti.ta","teeth","noun","","",""})
table.insert(t,{"to","and","marker","","",""})
table.insert(t,{"tree.ya","ambiguous","modifier","","",""})
table.insert(t,{"u.da","old","modifier","","",""})
table.insert(t,{"u.ta.ku","to give birth","verb","","",""})
table.insert(t,{"u.ta.yan","childbearer","noun","","","they also get to name the children"})
table.insert(t,{"u.ti.ku","to create","verb","","",""})
table.insert(t,{"u.tia.raun","deity","noun","","",""})
table.insert(t,{"u.u","no","expression","","",""})
table.insert(t,{"u.wa.ton","white","modifier","","",""})
table.insert(t,{"u.wa.wa","bone","noun","","",""})
table.insert(t,{"u.we","worm","noun","","",""})
table.insert(t,{"un","being","noun","","",""})
table.insert(t,{"un.bae","child","noun","","",""})
table.insert(t,{"un.be.taf","adult","noun","","",""})
table.insert(t,{"wa.ek","thick","modifier","","",""})
table.insert(t,{"wa.mat","lake","noun","","",""})
table.insert(t,{"wa.to","water","noun","","",""})
table.insert(t,{"wa.wo.te","rain","noun","","",""})
table.insert(t,{"wan.to","wet","modifier","","",""})
table.insert(t,{"we.nya","future","noun","","",""})
table.insert(t,{"wi.ki","song","noun","","",""})
table.insert(t,{"wi.ki.ku","to sing","verb","","",""})
table.insert(t,{"wi.mo.ku","to play","verb","","",""})
table.insert(t,{"wi.po.mat","planet, earth","noun","","",""})
table.insert(t,{"wi.ta.ku","to wash","verb","","",""})
table.insert(t,{"wi.wi.ku","to laugh","verb","","",""})
table.insert(t,{"win.ku","to fly","verb","","",""})
table.insert(t,{"wind","wings","noun","","",""})
table.insert(t,{"wo.k.ku","to fall","verb","","",""})
table.insert(t,{"wuu.ga","fog","noun","","",""})
table.insert(t,{"ya.ku","to give","verb","","",""})
table.insert(t,{"yan.wa","ocean","noun","","",""})
table.insert(t,{"yea.mat","area","noun","","",""})
table.insert(t,{"yesh","adorable","modifier","","",""})
table.insert(t,{"yi.ma","year","noun","","",""})
table.insert(t,{"yu","denotes topic, emphasis","marker","","","overwrites subject if they are the same. otherwise, goes after subject marker"})
return t