narremgen 0.9.3__tar.gz

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (44) hide show
  1. narremgen-0.9.3/LICENSE +21 -0
  2. narremgen-0.9.3/MANIFEST.in +1 -0
  3. narremgen-0.9.3/PKG-INFO +174 -0
  4. narremgen-0.9.3/README.md +138 -0
  5. narremgen-0.9.3/pyproject.toml +63 -0
  6. narremgen-0.9.3/setup.cfg +4 -0
  7. narremgen-0.9.3/src/narremgen/__init__.py +98 -0
  8. narremgen-0.9.3/src/narremgen/__main__.py +4 -0
  9. narremgen-0.9.3/src/narremgen/analyzestats.py +743 -0
  10. narremgen-0.9.3/src/narremgen/chapters.py +259 -0
  11. narremgen-0.9.3/src/narremgen/data/Advice_Urban_Walk.csv +37 -0
  12. narremgen-0.9.3/src/narremgen/data/Context_Urban_Walk.csv +37 -0
  13. narremgen-0.9.3/src/narremgen/data/Mapping_Urban_Walk.csv +37 -0
  14. narremgen-0.9.3/src/narremgen/data/merged_walk in the city.txt +896 -0
  15. narremgen-0.9.3/src/narremgen/data.py +393 -0
  16. narremgen-0.9.3/src/narremgen/export.py +704 -0
  17. narremgen-0.9.3/src/narremgen/gui.py +4482 -0
  18. narremgen-0.9.3/src/narremgen/llmcore.py +1269 -0
  19. narremgen-0.9.3/src/narremgen/main.py +1234 -0
  20. narremgen-0.9.3/src/narremgen/narratives.py +610 -0
  21. narremgen-0.9.3/src/narremgen/pipeline.py +544 -0
  22. narremgen-0.9.3/src/narremgen/segmenter.py +571 -0
  23. narremgen-0.9.3/src/narremgen/settings/DE.csv +11 -0
  24. narremgen-0.9.3/src/narremgen/settings/SN.csv +54 -0
  25. narremgen-0.9.3/src/narremgen/settings/SN_extended_raw.csv +110 -0
  26. narremgen-0.9.3/src/narremgen/settings/examples.txt +123 -0
  27. narremgen-0.9.3/src/narremgen/settings/examples_untaggued.txt +34 -0
  28. narremgen-0.9.3/src/narremgen/settings/narremgen_gui.ini +29 -0
  29. narremgen-0.9.3/src/narremgen/settings/narremgen_ui.ini +17 -0
  30. narremgen-0.9.3/src/narremgen/settings/narremgen_variants.ini +20 -0
  31. narremgen-0.9.3/src/narremgen/settings/operators_structural_raw.csv +18 -0
  32. narremgen-0.9.3/src/narremgen/settings/operators_stylistic_raw.csv +74 -0
  33. narremgen-0.9.3/src/narremgen/settings/prompt_direct_variant_example.txt +34 -0
  34. narremgen-0.9.3/src/narremgen/settings/prompt_formal_variant_example.txt +172 -0
  35. narremgen-0.9.3/src/narremgen/settings/style.txt +93 -0
  36. narremgen-0.9.3/src/narremgen/themes.py +518 -0
  37. narremgen-0.9.3/src/narremgen/utils.py +977 -0
  38. narremgen-0.9.3/src/narremgen/variants.py +953 -0
  39. narremgen-0.9.3/src/narremgen.egg-info/PKG-INFO +174 -0
  40. narremgen-0.9.3/src/narremgen.egg-info/SOURCES.txt +42 -0
  41. narremgen-0.9.3/src/narremgen.egg-info/dependency_links.txt +1 -0
  42. narremgen-0.9.3/src/narremgen.egg-info/entry_points.txt +3 -0
  43. narremgen-0.9.3/src/narremgen.egg-info/requires.txt +29 -0
  44. narremgen-0.9.3/src/narremgen.egg-info/top_level.txt +1 -0
@@ -0,0 +1,21 @@
1
+ MIT License
2
+
3
+ Copyright (c) RPriam 2025
4
+
5
+ Permission is hereby granted, free of charge, to any person obtaining a copy
6
+ of this software and associated documentation files (the "Software"), to deal
7
+ in the Software without restriction, including without limitation the rights
8
+ to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
9
+ copies of the Software, and to permit persons to whom the Software is
10
+ furnished to do so, subject to the following conditions:
11
+
12
+ The above copyright notice and this permission notice shall be included in all
13
+ copies or substantial portions of the Software.
14
+
15
+ THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
16
+ IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
17
+ FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
18
+ AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
19
+ LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
20
+ OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
21
+ SOFTWARE.
@@ -0,0 +1 @@
1
+ recursive-include src/narremgen/settings *
@@ -0,0 +1,174 @@
1
+ Metadata-Version: 2.4
2
+ Name: narremgen
3
+ Version: 0.9.3
4
+ License-Expression: MIT
5
+ Project-URL: Homepage, https://github.com/narremgen
6
+ Project-URL: Repository, https://github.com/narremgen/narremgen
7
+ Project-URL: Issues, https://github.com/narremgen/narremgen/issues
8
+ Requires-Python: >=3.10
9
+ Description-Content-Type: text/markdown
10
+ License-File: LICENSE
11
+ Requires-Dist: requests
12
+ Requires-Dist: pandas
13
+ Requires-Dist: numpy
14
+ Requires-Dist: openai>=1.0.0
15
+ Requires-Dist: google-genai>=1.0.0
16
+ Provides-Extra: emotions
17
+ Requires-Dist: transformers>=4.40; extra == "emotions"
18
+ Requires-Dist: torch; extra == "emotions"
19
+ Requires-Dist: vaderSentiment; extra == "emotions"
20
+ Provides-Extra: textstats
21
+ Requires-Dist: lexicalrichness; extra == "textstats"
22
+ Provides-Extra: plots
23
+ Requires-Dist: matplotlib; extra == "plots"
24
+ Provides-Extra: gui
25
+ Provides-Extra: all
26
+ Requires-Dist: transformers>=4.40; extra == "all"
27
+ Requires-Dist: torch; extra == "all"
28
+ Requires-Dist: google-genai>=1.0.0; extra == "all"
29
+ Requires-Dist: openai>=1.0.0; extra == "all"
30
+ Requires-Dist: lexicalrichness; extra == "all"
31
+ Requires-Dist: matplotlib; extra == "all"
32
+ Requires-Dist: vaderSentiment; extra == "all"
33
+ Requires-Dist: python-docx; extra == "all"
34
+ Requires-Dist: reportlab; extra == "all"
35
+ Dynamic: license-file
36
+
37
+ # Narremgen
38
+
39
+ Narremgen is an experimental Python package for **structured narrative text generation**, combining narrative schemas (SN) and emotional dynamics (DE) to produce coherent short texts assembled into full booklets of advice or answers from a topic or a question with optional chapters.
40
+
41
+ Based on the methodology described in *Priam, R. (2025). Narrative and Emotional Structures for Generation of Short Texts for Advice*, it provides a reproducible multi-batch pipeline for controlled text generation using LLM models with narrative+emotional structures. This is a partial implementation of the method SN/DE/K for generation, modeling and analysis of texts.
42
+
43
+
44
+ ## Main modules of narremgen
45
+
46
+ - `pipeline`: Entry point for batch generation, variants, stats, and exports per topic run.
47
+ - `llmcore`: Unified LLM router (role→model mapping, retries, multi-provider support).
48
+ - `data`: Input preparation and CSV handling for topic–advice–prompt-based generation.
49
+ - `narratives`: Text post-processing, style control, and SN/DE-aware narrative realization.
50
+ - `variants`: Planning and batch rewriting into alternative styles (direct, formal, etc.) with stats.
51
+ - `themes`: LLM-based theme discovery and assignment for advice corpora, producing themes+assignments.
52
+ - `chapters`: Build chaptered corpora (CSV/JSON) from themes or manual grouping, for book-like exports.
53
+ - `export`: Plain-text and LaTeX exporters (merged `.txt` and `book_*.tex` from neutral and variants).
54
+ - `analyzestats`: Length, lexical, emotion and SN/DE distribution analysis, with CSV summaries and plots.
55
+ - `utils`: Shared helpers for workdirs, filenames, CSV repair, backups, and neutral corpus construction.
56
+ - `gui`: Optional Tkinter GUI for generation, or readings aligned/selected texts, or segmentation.
57
+ - `main`: Optional command-line terminal module for the generation with input arguments.
58
+
59
+
60
+ ## Key features
61
+
62
+ - Generation of a <u>Corpus</u> of <u>Stories</u> (of varying and controlled structures) and <u>Formal Texts</u> for advice from a topic (full sentence).
63
+ - Multi-batch narrative pipeline using a configurable LLM router (`llmcore`) across several providers with a command-line interface.
64
+ - Automatic topic and advice mapping, SN/DE-structured neutral generation, and aligned variant rewriting (direct, formal, other styles).
65
+ - Robust CSV workflow: filtering, renumbering, safe merging of advice/sentence/context/mapping, consistent filenames, variant workdirs.
66
+ - LLM-driven theme extraction and assignment, plus chapter construction for organizing texts into coherent sections (classes of texts).
67
+ - Plain-text and TeX export of neutral and variant corpora (merged narrative files and full chaptered books for text reading/selection).
68
+ - Integrated corpus analysis: lexical richness, length, emotion profiles, and SN/DE distributions, including neutral vs. variant comparison.
69
+ - Textual statistics and emotion statistics of specialized language models from the literature for evaluation of generated texts or corpus.
70
+ - Ready-to-use structure for reproducible experiments in text generation with emotions for character and educational content synthesis.
71
+ - Graphical user interface for generation with API key checks, creation of variants, and reading/selection of aligned texts for a topic.
72
+ - Available connection to OpenAI, OpenRouter, Google-GenAI, Mistral, etc. for text generation (see python code and interface for dry-run).
73
+ - No length limit for the topic string; a command is available for adding a long text (file or string) as context for the advice or generation stages of the pipeline.
74
+
75
+
76
+ ## Usage
77
+
78
+ ### Installation
79
+
80
+ ```
81
+ pip install narremgen
82
+ ```
83
+
84
+ ### Generation with Python and the package
85
+
86
+ ```python
87
+ import narremgen
88
+ from narremgen import pipeline
89
+
90
+ run_pipeline(
91
+ topic="Walking_in_the_city",
92
+ output_dir="./outputs",
93
+ assets_dir="./narremgen/settings",
94
+ n_batches=2,
95
+ n_per_batch=20,
96
+ output_format="txt",
97
+ verbose=False
98
+ )
99
+ ```
100
+
101
+ ### With command-line interface in the terminal
102
+
103
+ ```python
104
+ # Pipeline + variants (default, not user ones)
105
+ python -m narremgen.main \
106
+ --topic "Walking_in_the_city" \
107
+ --output-dir "./outputs" \
108
+ --batches 2 \
109
+ --per-batch 20 \
110
+ --output-format txt \
111
+ --verbose
112
+ ```
113
+
114
+ ```python
115
+ # Pipeline without variants (neutral only)
116
+ python -m narremgen.main \
117
+ --topic "Walking_in_the_city" \
118
+ --output-dir "./outputs" \
119
+ --batches 2 \
120
+ --per-batch 20 \
121
+ --output-format txt \
122
+ --skip-variants \
123
+ --verbose
124
+ ```
125
+
126
+ ```python
127
+ # Dry-test without generation pipeline
128
+ python -m narremgen.main \
129
+ --diagnostic-dry-run \
130
+ --verbose
131
+ ```
132
+
133
+ From the command line in the terminal: GUI <br>
134
+
135
+ ```python
136
+ # Interface generation+reading+saving
137
+ python -m narremgen.gui
138
+ ```
139
+
140
+ ## Other examples of calls (check exact model names, and see the documentation for texts)
141
+
142
+ ### OpenAI everywhere as simple default + export TeX booklet
143
+
144
+ narremgen --topic "Small habits, big effects" --output-dir "./out" --default-model "openai\gpt-4o-mini" --export-book-tex
145
+
146
+ ### Ollama local (offline) + TeX output + skip theme analysis
147
+
148
+ narremgen --topic "Home organisation and walking" --output-dir "./out" --default-model "ollama\gemma3:4b" --batches 3 --per-batch 30 --output-format tex --export-book-tex
149
+
150
+ ### OpenRouter mix: DeepSeek for mapping, Llama for narrative, GPT-4o-mini for the rest + multiple variants
151
+
152
+ narremgen --topic "Walk habits in the city" --output-dir "./out" --api-key-file "./llmkeys.txt" --model-advice "openrouter\openai/gpt-4o-mini" --model-mapping "openrouter\deepseek/deepseek-reasoner" --model-context "openrouter\openai/gpt-4o-mini" --model-narrative "openrouter\meta-llama/llama-3.1-70b-instruct" --model-variants-generation "openrouter\openai/gpt-4o-mini"
153
+
154
+ ### Mistral direct (OpenAI-compatible) + themes enabled with custom range and batch size
155
+
156
+ narremgen --topic "Healthy routines for a walk everyday" --output-dir "./out" --api-key-file "./llmkeys.txt" --default-model "mistral\mistral-large-latest" --themes-min 7 --themes-max 12 --themes-batch-size 30
157
+
158
+ ### Grok default + bypass variants generation to local Phi-4 (Ollama) with larger token budget
159
+
160
+ narremgen --topic "Walking around in a small town" --output-dir "./out" --api-key-file "./llmkeys.txt" --default-model "grok\grok-2-latest" --model-variants-generation "ollama\phi4:14b" --variant-batch-size 40 --variant-max-tokens 2500
161
+
162
+ ### Quick connectivity check (no files generated): diagnostic dry-run with longer timeout
163
+
164
+ narremgen --diagnostic-dry-run --model-advice "openrouter\deepseek/deepseek-chat" --request-timeout 90
165
+
166
+ ## Warnings
167
+
168
+ - **Practical note:** very large generations (e.g. high `--batches` × `--per-batch`) may take a long time, may produce repeated advice, and can fail due to rate limits/timeouts or oversized intermediate files (these issues are not handled automatically). Start small (e.g. `--batches 2 --per-batch 30`) and scale up while monitoring API usage.
169
+
170
+ - **Output integrity:** in some cases an LLM response can be malformed (wrong format) or truncated, which may lead to missing or unusable advice entries. If this happens, rerun the affected batch.
171
+
172
+ - **Current usability:** Only informed users or trainers should use this system in practice. <br>
173
+ Some advice may be missing or mistaken due to AI/programming. <br>
174
+ In the future, automatic checks may be implemented for end users.
@@ -0,0 +1,138 @@
1
+ # Narremgen
2
+
3
+ Narremgen is an experimental Python package for **structured narrative text generation**, combining narrative schemas (SN) and emotional dynamics (DE) to produce coherent short texts assembled into full booklets of advice or answers from a topic or a question with optional chapters.
4
+
5
+ Based on the methodology described in *Priam, R. (2025). Narrative and Emotional Structures for Generation of Short Texts for Advice*, it provides a reproducible multi-batch pipeline for controlled text generation using LLM models with narrative+emotional structures. This is a partial implementation of the method SN/DE/K for generation, modeling and analysis of texts.
6
+
7
+
8
+ ## Main modules of narremgen
9
+
10
+ - `pipeline`: Entry point for batch generation, variants, stats, and exports per topic run.
11
+ - `llmcore`: Unified LLM router (role→model mapping, retries, multi-provider support).
12
+ - `data`: Input preparation and CSV handling for topic–advice–prompt-based generation.
13
+ - `narratives`: Text post-processing, style control, and SN/DE-aware narrative realization.
14
+ - `variants`: Planning and batch rewriting into alternative styles (direct, formal, etc.) with stats.
15
+ - `themes`: LLM-based theme discovery and assignment for advice corpora, producing themes+assignments.
16
+ - `chapters`: Build chaptered corpora (CSV/JSON) from themes or manual grouping, for book-like exports.
17
+ - `export`: Plain-text and LaTeX exporters (merged `.txt` and `book_*.tex` from neutral and variants).
18
+ - `analyzestats`: Length, lexical, emotion and SN/DE distribution analysis, with CSV summaries and plots.
19
+ - `utils`: Shared helpers for workdirs, filenames, CSV repair, backups, and neutral corpus construction.
20
+ - `gui`: Optional Tkinter GUI for generation, or readings aligned/selected texts, or segmentation.
21
+ - `main`: Optional command-line terminal module for the generation with input arguments.
22
+
23
+
24
+ ## Key features
25
+
26
+ - Generation of a <u>Corpus</u> of <u>Stories</u> (of varying and controlled structures) and <u>Formal Texts</u> for advice from a topic (full sentence).
27
+ - Multi-batch narrative pipeline using a configurable LLM router (`llmcore`) across several providers with a command-line interface.
28
+ - Automatic topic and advice mapping, SN/DE-structured neutral generation, and aligned variant rewriting (direct, formal, other styles).
29
+ - Robust CSV workflow: filtering, renumbering, safe merging of advice/sentence/context/mapping, consistent filenames, variant workdirs.
30
+ - LLM-driven theme extraction and assignment, plus chapter construction for organizing texts into coherent sections (classes of texts).
31
+ - Plain-text and TeX export of neutral and variant corpora (merged narrative files and full chaptered books for text reading/selection).
32
+ - Integrated corpus analysis: lexical richness, length, emotion profiles, and SN/DE distributions, including neutral vs. variant comparison.
33
+ - Textual statistics and emotion statistics of specialized language models from the literature for evaluation of generated texts or corpus.
34
+ - Ready-to-use structure for reproducible experiments in text generation with emotions for character and educational content synthesis.
35
+ - Graphical user interface for generation with API key checks, creation of variants, and reading/selection of aligned texts for a topic.
36
+ - Available connection to OpenAI, OpenRouter, Google-GenAI, Mistral, etc. for text generation (see python code and interface for dry-run).
37
+ - No length limit for the topic string; a command is available for adding a long text (file or string) as context for the advice or generation stages of the pipeline.
38
+
39
+
40
+ ## Usage
41
+
42
+ ### Installation
43
+
44
+ ```
45
+ pip install narremgen
46
+ ```
47
+
48
+ ### Generation with Python and the package
49
+
50
+ ```python
51
+ import narremgen
52
+ from narremgen import pipeline
53
+
54
+ run_pipeline(
55
+ topic="Walking_in_the_city",
56
+ output_dir="./outputs",
57
+ assets_dir="./narremgen/settings",
58
+ n_batches=2,
59
+ n_per_batch=20,
60
+ output_format="txt",
61
+ verbose=False
62
+ )
63
+ ```
64
+
65
+ ### With command-line interface in the terminal
66
+
67
+ ```python
68
+ # Pipeline + variants (default, not user ones)
69
+ python -m narremgen.main \
70
+ --topic "Walking_in_the_city" \
71
+ --output-dir "./outputs" \
72
+ --batches 2 \
73
+ --per-batch 20 \
74
+ --output-format txt \
75
+ --verbose
76
+ ```
77
+
78
+ ```python
79
+ # Pipeline without variants (neutral only)
80
+ python -m narremgen.main \
81
+ --topic "Walking_in_the_city" \
82
+ --output-dir "./outputs" \
83
+ --batches 2 \
84
+ --per-batch 20 \
85
+ --output-format txt \
86
+ --skip-variants \
87
+ --verbose
88
+ ```
89
+
90
+ ```python
91
+ # Dry-test without generation pipeline
92
+ python -m narremgen.main \
93
+ --diagnostic-dry-run \
94
+ --verbose
95
+ ```
96
+
97
+ From the command line in the terminal: GUI <br>
98
+
99
+ ```python
100
+ # Interface generation+reading+saving
101
+ python -m narremgen.gui
102
+ ```
103
+
104
+ ## Other examples of calls (check exact model names, and see the documentation for texts)
105
+
106
+ ### OpenAI everywhere as simple default + export TeX booklet
107
+
108
+ narremgen --topic "Small habits, big effects" --output-dir "./out" --default-model "openai\gpt-4o-mini" --export-book-tex
109
+
110
+ ### Ollama local (offline) + TeX output + skip theme analysis
111
+
112
+ narremgen --topic "Home organisation and walking" --output-dir "./out" --default-model "ollama\gemma3:4b" --batches 3 --per-batch 30 --output-format tex --export-book-tex
113
+
114
+ ### OpenRouter mix: DeepSeek for mapping, Llama for narrative, GPT-4o-mini for the rest + multiple variants
115
+
116
+ narremgen --topic "Walk habits in the city" --output-dir "./out" --api-key-file "./llmkeys.txt" --model-advice "openrouter\openai/gpt-4o-mini" --model-mapping "openrouter\deepseek/deepseek-reasoner" --model-context "openrouter\openai/gpt-4o-mini" --model-narrative "openrouter\meta-llama/llama-3.1-70b-instruct" --model-variants-generation "openrouter\openai/gpt-4o-mini"
117
+
118
+ ### Mistral direct (OpenAI-compatible) + themes enabled with custom range and batch size
119
+
120
+ narremgen --topic "Healthy routines for a walk everyday" --output-dir "./out" --api-key-file "./llmkeys.txt" --default-model "mistral\mistral-large-latest" --themes-min 7 --themes-max 12 --themes-batch-size 30
121
+
122
+ ### Grok default + bypass variants generation to local Phi-4 (Ollama) with larger token budget
123
+
124
+ narremgen --topic "Walking around in a small town" --output-dir "./out" --api-key-file "./llmkeys.txt" --default-model "grok\grok-2-latest" --model-variants-generation "ollama\phi4:14b" --variant-batch-size 40 --variant-max-tokens 2500
125
+
126
+ ### Quick connectivity check (no files generated): diagnostic dry-run with longer timeout
127
+
128
+ narremgen --diagnostic-dry-run --model-advice "openrouter\deepseek/deepseek-chat" --request-timeout 90
129
+
130
+ ## Warnings
131
+
132
+ - **Practical note:** very large generations (e.g. high `--batches` × `--per-batch`) may take a long time, may produce repeated advice, and can fail due to rate limits/timeouts or oversized intermediate files (these issues are not handled automatically). Start small (e.g. `--batches 2 --per-batch 30`) and scale up while monitoring API usage.
133
+
134
+ - **Output integrity:** in some cases an LLM response can be malformed (wrong format) or truncated, which may lead to missing or unusable advice entries. If this happens, rerun the affected batch.
135
+
136
+ - **Current usability:** Only informed users or trainers should use this system in practice. <br>
137
+ Some advice may be missing or mistaken due to AI/programming. <br>
138
+ In the future, automatic checks may be implemented for end users.
@@ -0,0 +1,63 @@
1
+ [build-system]
2
+ requires = ["setuptools>=68", "wheel"]
3
+ build-backend = "setuptools.build_meta"
4
+
5
+ [project]
6
+ name = "narremgen"
7
+ version = "0.9.3"
8
+ requires-python = ">=3.10"
9
+ dependencies = [
10
+ "requests",
11
+ "pandas",
12
+ "numpy",
13
+ "openai>=1.0.0",
14
+ "google-genai>=1.0.0",
15
+ ]
16
+ readme = { file = "README.md", content-type = "text/markdown" }
17
+ license = "MIT"
18
+ license-files = ["LICEN[CS]E*"]
19
+
20
+ [project.optional-dependencies]
21
+ emotions = [
22
+ "transformers>=4.40",
23
+ "torch",
24
+ "vaderSentiment",
25
+ ]
26
+ textstats = [
27
+ "lexicalrichness",
28
+ ]
29
+ plots = [
30
+ "matplotlib",
31
+ ]
32
+ gui = []
33
+ all = [
34
+ "transformers>=4.40",
35
+ "torch",
36
+ "google-genai>=1.0.0",
37
+ "openai>=1.0.0",
38
+ "lexicalrichness",
39
+ "matplotlib",
40
+ "vaderSentiment",
41
+ "python-docx",
42
+ "reportlab",
43
+ ]
44
+
45
+ [project.scripts]
46
+ narremgen = "narremgen.main:main"
47
+ narremgen-gui = "narremgen.gui:main"
48
+
49
+ [tool.setuptools]
50
+ include-package-data = true
51
+ package-dir = {"" = "src"}
52
+
53
+ [tool.setuptools.package-data]
54
+ narremgen = ["settings/**/*", "data/**/*"]
55
+
56
+ [tool.setuptools.packages.find]
57
+ where = ["src"]
58
+ include = ["narremgen*"]
59
+
60
+ [project.urls]
61
+ Homepage = "https://github.com/narremgen"
62
+ Repository = "https://github.com/narremgen/narremgen"
63
+ Issues = "https://github.com/narremgen/narremgen/issues"
@@ -0,0 +1,4 @@
1
+ [egg_info]
2
+ tag_build =
3
+ tag_date = 0
4
+
@@ -0,0 +1,98 @@
1
+ """
2
+ narremgen
3
+ =========
4
+
5
+ Framework for narrative and emotional text generation based on structured
6
+ narrative schemes (SN) and emotional dynamics (DE).
7
+ """
8
+
9
+ from __future__ import annotations
10
+
11
+ from importlib import import_module
12
+ from typing import Any
13
+
14
+ __version__ = "0.9.3"
15
+ __author__ = "Rodolphe Priam"
16
+ __license__ = "MIT"
17
+ __email__ = "rpriam@gmail.com"
18
+
19
+ __all__ = [
20
+ "LLMConnect",
21
+ "generate_advice",
22
+ "generate_mapping",
23
+ "generate_context",
24
+ "generate_narratives",
25
+ "generate_narratives_batch",
26
+ "build_corpus_for_variants",
27
+ "run_llm_theme_pipeline",
28
+ "run_one_variant_pipeline",
29
+ "run_pipeline",
30
+ "analyze_sn_de_distribution",
31
+ "save_output",
32
+ "safe_generate",
33
+ "merge_and_filter",
34
+ "renumerote_filtered",
35
+ "audit_filtered",
36
+ "validate_mapping",
37
+ "quick_check_filtered",
38
+ ]
39
+
40
+ _LAZY: dict[str, tuple[str, str]] = {
41
+ "LLMConnect": ("llmcore", "LLMConnect"),
42
+ "safe_chat_completion": ("llmcore", "safe_chat_completion"),
43
+ "estimate_tokens": ("llmcore", "estimate_tokens"),
44
+ "generate_advice": ("data", "generate_advice"),
45
+ "generate_mapping": ("data", "generate_mapping"),
46
+ "generate_context": ("data", "generate_context"),
47
+ "generate_narratives": ("narratives", "generate_narratives"),
48
+ "generate_narratives_batch": ("narratives", "generate_narratives_batch"),
49
+ "run_pipeline": ("pipeline", "run_pipeline"),
50
+ "build_corpus_for_variants": ("chapters", "build_corpus_for_variants"),
51
+ "run_llm_theme_pipeline": ("themes", "run_llm_theme_pipeline"),
52
+ "run_one_variant_pipeline": ("variants", "run_one_variant_pipeline"),
53
+ "analyze_sn_de_distribution": ("analyzestats", "analyze_sn_de_distribution"),
54
+ "save_output": ("utils", "save_output"),
55
+ "safe_generate": ("utils", "safe_generate"),
56
+ "merge_and_filter": ("utils", "merge_and_filter"),
57
+ "renumerote_filtered": ("utils", "renumerote_filtered"),
58
+ "audit_filtered": ("utils", "audit_filtered"),
59
+ "validate_mapping": ("utils", "validate_mapping"),
60
+ "quick_check_filtered": ("utils", "quick_check_filtered"),
61
+ }
62
+
63
+ _HINTS: dict[str, str] = {
64
+ "openai": 'pip install ".[llm]"',
65
+ "transformers": 'pip install ".[emotions]"',
66
+ "torch": 'pip install ".[emotions]"',
67
+ "vaderSentiment": 'pip install ".[emotions]"',
68
+ "lexicalrichness": 'pip install ".[textstats]"',
69
+ "matplotlib": 'pip install ".[plots]"',
70
+ "reportlab": 'pip install ".[all]"',
71
+ "docx": 'pip install ".[all]"',
72
+ }
73
+
74
+ def __getattr__(name: str) -> Any:
75
+ if name not in _LAZY:
76
+ raise AttributeError(f"module has no attribute {name!r}")
77
+
78
+ module_name, attr = _LAZY[name]
79
+ try:
80
+ mod = import_module(f".{module_name}", __name__)
81
+ value = getattr(mod, attr)
82
+ globals()[name] = value
83
+ return value
84
+ except ModuleNotFoundError as e:
85
+ missing = getattr(e, "name", None)
86
+ if missing in _HINTS:
87
+ raise ModuleNotFoundError(
88
+ f"Optional dependency missing: {missing!r}. To enable this feature: {_HINTS[missing]}"
89
+ ) from e
90
+ raise
91
+ except AttributeError as e:
92
+ raise AttributeError(
93
+ f"Public symbol {name!r} is mapped to {module_name}.{attr} but it does not exist."
94
+ ) from e
95
+
96
+
97
+ def __dir__() -> list[str]:
98
+ return sorted(set(globals().keys()) | set(_LAZY.keys()))
@@ -0,0 +1,4 @@
1
+ from narremgen.main import main
2
+
3
+ if __name__ == "__main__":
4
+ raise SystemExit(main())