EmergentSemanticDialogueFramework 0.1.0

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
checksums.yaml ADDED
@@ -0,0 +1,7 @@
1
+ ---
2
+ SHA256:
3
+ metadata.gz: d4a33fe2478a54c2c70bc4a661305e41248708e85c61beec6dc50d5511a64812
4
+ data.tar.gz: 8fb915d077b7ac760fabf983ab68da2df20129ca8c9f74acb43891b864cf545c
5
+ SHA512:
6
+ metadata.gz: 1e81af516fbda281801eb069e19c87cdc33838d56cb3258abb1ff0d29355dc7d44bf3ba355f5e312df0fde77b71e60ccb87f2e6abd774f147c3d485918a0e26e
7
+ data.tar.gz: 042b1f7dd162c546338e20eae63a2267af0c14dd31bd65beaf2df997d08b673cf8a288dc88e51db9f4d103ef15b59f210c57f5bfdde88912e76389a2b9fa6b56
data/README.md ADDED
@@ -0,0 +1,17 @@
1
+ # EmergentSemanticDialogueFramework
2
+
3
+ This is the stand-alone application for the Emergent Semantic Dialogue Framework. It generates its own input, which is then self-reinforced; only the reinforced dialogue strings become the spoken dialogue of the MC.
4
+
5
+ ## Installation
6
+
7
+ ~~~
8
+ gem install EmergentSemanticDialogueFramework
9
+ ~~~
10
+
11
+ ## Usage
12
+
13
+ Coming Soon
14
+
15
+ ## Contributing
16
+
17
+ Bug reports and pull requests are welcome on GitHub at https://github.com/BequestDeCendresStudios/BequestDeCendresFramework.
data/Rakefile ADDED
@@ -0,0 +1,4 @@
1
+ # frozen_string_literal: true
2
+
3
+ require "bundler/gem_tasks"
4
+ task default: %i[]
@@ -0,0 +1,5 @@
1
+ # frozen_string_literal: true
2
+
3
module EmergentSemanticDialogueFramework
  # Gem version string, read by the gemspec.
  VERSION = "0.1.0"
end
@@ -0,0 +1,449 @@
1
+ # frozen_string_literal: true
2
+
3
+ require_relative "EmergentSemanticDialogueFramework/version"
4
+
5
+ module EmergentSemanticDialogueFramework
6
+ class Error < StandardError; end
7
+
8
+ class SelfReinforcer
9
# Picks one article-like token uniformly at random from a three-context
# lookup cube and stores it in @chosen_word_class (also returned, as the
# method's last expression).
def self.word_class
  word_classes = [
    [
      [["Le", "Le"], ["Le", "La"], ["Le", "Les"]],
      [["La", "Le"], ["La", "La"], ["La", "Les"]],
      [["Les", "Le"], ["Les", "La"], ["Les", "Les"]],
    ], [
      [["Anu", "Anu"], ["Anu", "Ana"], ["Anu", "Anos"]],
      [["Ana", "Anu"], ["Ana", "Ana"], ["Ana", "Anos"]],
      [["Anos", "Anu"], ["Anos", "Ana"], ["Anos", "Anus"]],
    ], [
      [["Lanu", "Lanu"], ["Lanu", "Lana"], ["Lanu", "Lanos"]],
      [["Lana", "Lanu"], ["Lana", "Lana"], ["Lana", "Lanos"]],
      [["Lanos", "Lanu"], ["Lanos", "Lana"], ["Lanos", "Lanos"]],
    ],
  ]

  # One uniform draw per axis: context, row, column, word-slot.
  context = rand(3)
  row     = rand(3)
  col     = rand(3)
  slot    = rand(2)

  @chosen_word_class = word_classes[context][row][col][slot]
end
38
+
39
# Picks one noun uniformly at random and stores it in @chosen_noun (also
# returned, as the method's last expression).
#
# Nouns live in four "context" tables: family members, animals, places, and
# the ojijaku pair. A context, a row, a cell, and a word-slot are sampled in
# turn. Index ranges are now derived from the table shapes themselves; the
# original hard-coded col_options = [0, 1, 2, 3] for the animal context even
# though its rows hold 8 cells, so columns 4-7 ("orgueil", "ours",
# "ostritch", "jiraff") could never be selected.
def self.noun
  ho = "homme"
  fe = "femme"
  fi = "fille"
  ga = "garcon"
  ta = "tante"
  oj = "oncle"
  cofi = "cousinfille"
  coga = "cousingarcon"
  grm = "grandmere"
  grp = "grandpere"

  ct = "chat"
  ch = "chien"
  oi = "oiseau"
  gr = "souris"
  ou = "ours"
  wo = "orgueil"
  pr = "ostritch"
  po = "jiraff"
  pi = "écureuil"

  m = "maison"
  c = "cabin"
  e = "ecole"

  oju = "ojijaku"
  neo = "ne ojijaku"

  # Family context is a full 10x10 pairing grid — build it instead of
  # writing out 100 literal cells.
  people = [ho, fe, fi, ga, ta, oj, cofi, coga, grm, grp]

  nouns = [
    people.map { |row_word| people.map { |col_word| [row_word, col_word] } },
    [
      [[ct, ct], [ct, ch], [ct, oi], [ct, gr], [ct, wo], [ct, ou], [ct, pr], [ct, po]],
      [[ch, ct], [ch, ch], [ch, oi], [ch, gr], [ch, wo], [ch, ou], [ch, pr], [ch, po]],
      # NOTE(review): [pi, gr] breaks the [oi, x] row pattern — preserved
      # as-is; confirm whether "écureuil" was really intended here.
      [[oi, ct], [oi, ch], [oi, oi], [pi, gr], [oi, wo], [oi, ou], [oi, pr], [oi, po]],
      [[gr, ct], [gr, ch], [gr, oi], [gr, gr], [gr, wo], [gr, ou], [gr, pr], [gr, po]],
    ],
    [
      [[m, m], [m, c], [m, e]],
      [[c, m], [c, c], [c, e]],
      [[e, m], [e, c], [e, e]],
    ],
    [
      [[oju, oju], [oju, neo]],
      [[neo, oju], [neo, neo]],
    ],
  ]

  table = nouns.sample # context
  row   = table.sample # row within the chosen context
  cell  = row.sample   # cell — covers every column, whatever the row width
  @chosen_noun = cell.sample
end
147
+
148
# Picks one adjective uniformly at random and stores it in
# @chosen_adjective (also returned, as the method's last expression).
#
# The six context tables are row-rotations of one another: context k pairs
# the word list rotated by k (row word) against the unrotated list (column
# word). Building them programmatically reproduces the original literal
# tables cell-for-cell.
def self.adjective
  words = ["tsin", "petite", "sucré", "roudy", "l'éducation", "sages"]

  adjectives = Array.new(words.size) do |k|
    words.rotate(k).map { |row_word| words.map { |col_word| [row_word, col_word] } }
  end

  context = rand(adjectives.size)
  row     = rand(words.size)
  col     = rand(words.size)
  slot    = rand(2)

  @chosen_adjective = adjectives[context][row][col][slot]
end
214
+
215
# Picks one subject particle ("es" or "es ne") uniformly at random and
# stores it in @chosen_subject (also returned, as the method's last
# expression). Both context tables are identical 2x2 pairing grids.
def self.subject
  tokens = ["es", "es ne"]
  grid = tokens.product(tokens).each_slice(2).to_a # 2x2 grid of [row, col]
  subjects = [grid, grid]                          # two identical contexts

  @chosen_subject = subjects.sample.sample.sample.sample
end
237
+
238
# Picks one verb uniformly at random and stores it in @chosen_verb (also
# returned, as the method's last expression).
#
# FIX: the original referenced avo / cou / ser / dev without ever defining
# them, so every call raised NameError. The French forms below are a best
# guess from the abbreviations — NOTE(review): confirm the intended words.
#
# The four context tables are row-rotations of one another (context k uses
# the verb list rotated by k for the row word), matching the original
# literal layout.
def self.verb
  avo = "avoir"
  cou = "courir"
  ser = "serait"
  dev = "devoir"

  base = [avo, cou, ser, dev]
  verbs = Array.new(base.size) do |k|
    base.rotate(k).map { |row_word| base.map { |col_word| [row_word, col_word] } }
  end

  context = rand(verbs.size)
  row     = rand(base.size)
  col     = rand(base.size)
  slot    = rand(2)

  @chosen_verb = verbs[context][row][col][slot]
end
273
+
274
# Picks one adverb uniformly at random and stores it in @chosen_adverb
# (also returned, as the method's last expression).
#
# FIX: the original's final line indexed an undefined local `verbs` instead
# of `adverbs` (and assigned the result to @chosen_adverb), raising
# NameError on every call.
#
# The four context tables are row-rotations of one another (context k uses
# the adverb list rotated by k for the row word), matching the original
# literal layout.
def self.adverb
  base = ["rapidement", "lentement", "assurement", "tranquillement"]

  adverbs = Array.new(base.size) do |k|
    base.rotate(k).map { |row_word| base.map { |col_word| [row_word, col_word] } }
  end

  context = rand(adverbs.size)
  row     = rand(base.size)
  col     = rand(base.size)
  slot    = rand(2)

  @chosen_adverb = adverbs[context][row][col][slot]
end
315
+
316
# Rolls one word from each part-of-speech sampler, joins them into a single
# sentence, and appends it to the possible-lines corpus file on disk.
# Relies on each sampler stashing its pick in a class-level @chosen_* ivar.
def self.create_dialogue_input
  word_class
  noun
  adjective
  subject
  verb
  adverb

  new_line = [@chosen_word_class, @chosen_noun, @chosen_adjective,
              @chosen_subject, @chosen_verb, @chosen_adverb].join(" ") + "."

  File.open("lib/npc/possible_lines.txt", "a") { |f| f.puts new_line }

  puts "Created input for possible lines..."
end
332
+
333
# Re-reads the generated dialogue corpus a "vortex" number of times, copying
# every occurrence of one randomly chosen "ideal" line into learned_lines,
# then repeats the process a "charge" number of times from learned_lines
# into reinforced_lines.
#
# FIXES relative to the original:
#   * `possible_line` / `learned_line` were undefined (missing the plural
#     "s") — NameError on every run.
#   * `chosen_charge_vlaue` was assigned but `chosen_charge_value` was read
#     — NameError before the charge loop.
#   * the charge indices were sampled from the 0..5 vortex option lists but
#     used against the 3x3 nested_charge table, so out-of-range rows/cols
#     produced nil and crashed.
def self.self_reinforcer
  # Vortex-math inspired repetition tables:
  #       1    2    4    8    7    5
  #  1   1,1  1,2  1,4  1,8  1,7  1,5
  #  2   2,1  2,2  2,4  2,8  2,7  2,5
  #  4   4,1  4,2  4,4  4,8  4,7  4,5
  #  8   8,1  8,2  8,4  8,8  8,7  8,5
  #  7   7,1  7,2  7,4  7,8  7,7  7,5
  #  5   5,1  5,2  5,4  5,8  5,7  5,5
  #
  #       3    6    9
  #  3   3,3  3,6  3,9
  #  6   6,3  6,6  6,9
  #  9   9,3  9,6  9,9
  vortex_digits = [1, 2, 4, 8, 7, 5]
  charge_digits = [3, 6, 9]

  nested_vortex = vortex_digits.map { |r| vortex_digits.map { |c| [r, c] } }
  nested_charge = charge_digits.map { |r| charge_digits.map { |c| [r, c] } }

  # Each table gets its own in-range row/col/slot draw.
  chosen_vortex_value = nested_vortex[rand(6)][rand(6)][rand(2)]
  chosen_charge_value = nested_charge[rand(3)][rand(3)][rand(2)]

  ## Vortex loop: harvest matches of the ideal line into learned_lines.
  chosen_vortex_value.times do
    possible_lines = File.readlines("lib/npc/possible_lines.txt")
    ideal_line = possible_lines[chosen_vortex_value]

    possible_lines.each do |current_line|
      next unless current_line == ideal_line

      File.open("lib/npc/learned_lines.txt", "a") { |f| f.puts current_line }
    end
  end

  ## Charge loop: promote matches into long-term (reinforced) memory.
  chosen_charge_value.times do
    learned_lines = File.readlines("lib/npc/learned_lines.txt")
    # NOTE(review): the original indexed by the VORTEX value here, not the
    # charge value — preserved as written, but confirm intent.
    ideal_line = learned_lines[chosen_vortex_value]

    learned_lines.each do |current_line|
      if current_line == ideal_line
        File.open("lib/npc/reinforced_lines.txt", "a") { |f| f.puts current_line }
      else
        puts "> Current line is not reinforced into longterm memory..."
      end
    end
  end
end
432
+ end
433
+
434
# NOTE(review): class name keeps the original spelling ("Dialouge") because
# it is part of the gem's public interface.
class OrateDialouge
  # Prints the next reinforced dialogue line (from a freshly shuffled copy
  # of the file) and advances the cursor persisted in dialogue_index.txt.
  def self.speak
    dialogue = File.readlines("lib/npc/reinforced_lines.txt").shuffle
    cursor = File.read("lib/npc/dialogue_index.txt").to_i

    print "> "
    puts dialogue[cursor]

    File.open("lib/npc/dialogue_index.txt", "w") { |f| f.puts cursor + 1 }
  end
end
449
+ end
@@ -0,0 +1,4 @@
1
+ module EmergentSemanticDialogueFramework
2
+ VERSION: String
3
+ # See the writing guide of rbs: https://github.com/ruby/rbs#guides
4
+ end
metadata ADDED
@@ -0,0 +1,50 @@
1
+ --- !ruby/object:Gem::Specification
2
+ name: EmergentSemanticDialogueFramework
3
+ version: !ruby/object:Gem::Version
4
+ version: 0.1.0
5
+ platform: ruby
6
+ authors:
7
+ - LWFlouisa
8
+ autorequire:
9
+ bindir: exe
10
+ cert_chain: []
11
+ date: 2025-07-01 00:00:00.000000000 Z
12
+ dependencies: []
13
+ description: This generates its own input string that is used to create a file called
14
+ possible lines that the self-reinforcement dialogue framework used to learn on.
15
+ Then orates a random line of dialogue from reinforced lines.
16
+ email:
17
+ - lwflouisa@gmail.com
18
+ executables: []
19
+ extensions: []
20
+ extra_rdoc_files: []
21
+ files:
22
+ - README.md
23
+ - Rakefile
24
+ - lib/EmergentSemanticDialogueFramework.rb
25
+ - lib/EmergentSemanticDialogueFramework/version.rb
26
+ - sig/EmergentSemanticDialogueFramework.rbs
27
+ homepage: https://bequestdecendresstudios.github.io/BequestDeCendresFramework/
28
+ licenses: []
29
+ metadata: {}
30
+ post_install_message:
31
+ rdoc_options: []
32
+ require_paths:
33
+ - lib
34
+ required_ruby_version: !ruby/object:Gem::Requirement
35
+ requirements:
36
+ - - ">="
37
+ - !ruby/object:Gem::Version
38
+ version: 3.0.0
39
+ required_rubygems_version: !ruby/object:Gem::Requirement
40
+ requirements:
41
+ - - ">="
42
+ - !ruby/object:Gem::Version
43
+ version: '0'
44
+ requirements: []
45
+ rubygems_version: 3.3.5
46
+ signing_key:
47
+ specification_version: 4
48
+ summary: Stand alone Emergent Semantic Dialogue Framework that will eventually be
49
+ used in Bequest De Cendres and Yumemoire.
50
+ test_files: []