adapted to the new word file
commit d0f7635c65 (parent fe44c40586)
get_words.lua (new file, 49 lines added)
@@ -0,0 +1,49 @@
+function getWords(path)
+	local file = io.open(path, "r")
+	local text = file:read("*all")
+	file:close()
+
+	local t = {} -- this table will hold our words!!! word format is {word, translation, types, canon origin, meta origin, notes}; the last three are optional.
+
+	local p = 0 -- current position in the text, nil once we hit the end of the file
+	while p do
+		p = p + 1
+
+		local new_word_data = ""
+
+		-- this isn't just the word but all the data around it, separated with tabs, so let's grab the current line first.
+		-- careful if it's the end of the file though!
+		local np = string.find(text, "\n", p) -- np is the position of the next newline, so we can slice out the current line
+		if np then
+			new_word_data = string.sub(text, p, np - 1)
+		else
+			new_word_data = string.sub(text, p)
+		end
+
+		if new_word_data ~= "" then
+			local new_word = {} -- we'll hold the sliced fields here
+			local wp = 0 -- position inside the word data
+			while wp do -- wp becomes nil at the end of the string
+				wp = wp + 1 -- move past the tab we just found
+
+				local wnp = string.find(new_word_data, "\t", wp)
+				local stuff = ""
+				-- slice the word data at every tab and collect each field
+				if wnp then
+					stuff = string.sub(new_word_data, wp, wnp - 1) or stuff
+				else
+					stuff = string.sub(new_word_data, wp) or stuff
+				end
+
+				table.insert(new_word, stuff)
+				wp = wnp
+			end
+			-- now we add the word to the word table
+			table.insert(t, new_word)
+		end
+		-- and move on to the next line
+		p = np
+	end
+	-- now we just return the table
+	return t
+end
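A hedged usage sketch, not part of the commit: the file name words_sample and the placeholder entries below are made up, and the real data lives in heonian-content/words, whose exact contents aren't shown here. It assumes the tab-separated {word, translation, types, canon origin, meta origin, notes} layout described in the comments above.

require "get_words"

-- write a tiny hypothetical words file: one entry per line, fields separated by tabs
local f = assert(io.open("words_sample", "w"))
f:write("foo\tbar\tnoun\nbaz\tqux\tverb\told canon\n")
f:close()

-- parse it back; each entry comes out as an array of fields
local words = getWords("words_sample")
-- words[1] is { "foo", "bar", "noun" }
-- words[2] is { "baz", "qux", "verb", "old canon" }
print(words[2][1], words[2][2]) -- prints baz and qux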
main.lua (4 changed lines)
@@ -2,9 +2,10 @@
 
 require "quick-terminal-customization"
 require "strings"
+require "get_words"
 
 -- lets get the words
-words = dofile("heonian-content/words.lua")
+words = getWords("heonian-content/words")
 
 -- lets slice up the multiple types a word can be into lists
 ListNouns = {}
@@ -54,6 +55,7 @@ function SentenceConstruct()
 	if r == 1 then
 		formal = true
 	end
+	formal = true
 	-- lets get subject
 	-- 80% pronoun
 	-- 20% noun
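The hunks above don't show how main.lua consumes the parsed entries beyond assigning them to words, so here is only a sketch of one way the positional fields could feed the type lists mentioned in the first hunk (ListNouns and the "lets slice up the multiple types" comment). The field order comes from the comment in get_words.lua; the matching logic is an assumption.

-- Hypothetical sketch: bucket parsed words by type using the positional fields
-- {word, translation, types, canon origin, meta origin, notes}.
ListNouns = {}
for _, w in ipairs(words) do
	local types = w[3] or "" -- third field is the (possibly multi-valued) types string
	if string.find(types, "noun", 1, true) then
		table.insert(ListNouns, w)
	end
end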