aait 2.3.12__tar.gz → 2.3.12.2__tar.gz

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (271)
  1. {aait-2.3.12 → aait-2.3.12.2}/PKG-INFO +1 -1
  2. {aait-2.3.12 → aait-2.3.12.2}/aait.egg-info/PKG-INFO +1 -1
  3. {aait-2.3.12 → aait-2.3.12.2}/aait.egg-info/SOURCES.txt +10 -1
  4. {aait-2.3.12 → aait-2.3.12.2}/orangecontrib/AAIT/__init__.py +22 -19
  5. {aait-2.3.12 → aait-2.3.12.2}/orangecontrib/AAIT/llm/answers_llama.py +158 -68
  6. {aait-2.3.12 → aait-2.3.12.2}/orangecontrib/AAIT/llm/lmstudio.py +2 -2
  7. {aait-2.3.12 → aait-2.3.12.2}/orangecontrib/AAIT/llm/process_documents.py +2 -2
  8. aait-2.3.12.2/orangecontrib/AAIT/llm/prompt_management.py +157 -0
  9. {aait-2.3.12 → aait-2.3.12.2}/orangecontrib/AAIT/utils/OperationSystem.py +5 -1
  10. {aait-2.3.12 → aait-2.3.12.2}/orangecontrib/AAIT/utils/shared_functions.py +54 -43
  11. {aait-2.3.12 → aait-2.3.12.2}/orangecontrib/AAIT/utils/subprocess_management.py +7 -3
  12. aait-2.3.12.2/orangecontrib/AAIT/utils/widget_positioning.py +68 -0
  13. {aait-2.3.12 → aait-2.3.12.2}/orangecontrib/AAIT/widgets/OWAddColumns.py +1 -1
  14. aait-2.3.12.2/orangecontrib/AAIT/widgets/OWAutoShowCreateInstance.py +750 -0
  15. aait-2.3.12.2/orangecontrib/AAIT/widgets/OWAutoShowTable.py +670 -0
  16. aait-2.3.12.2/orangecontrib/AAIT/widgets/OWConverseLLM.py +274 -0
  17. {aait-2.3.12 → aait-2.3.12.2}/orangecontrib/AAIT/widgets/OWDisplayMD.py +22 -1
  18. {aait-2.3.12 → aait-2.3.12.2}/orangecontrib/AAIT/widgets/OWExtractTokens.py +12 -2
  19. {aait-2.3.12 → aait-2.3.12.2}/orangecontrib/AAIT/widgets/OWFileWithPath.py +0 -1
  20. {aait-2.3.12 → aait-2.3.12.2}/orangecontrib/AAIT/widgets/OWFusionNM.py +1 -1
  21. {aait-2.3.12 → aait-2.3.12.2}/orangecontrib/AAIT/widgets/OWGenerateWord.py +10 -8
  22. {aait-2.3.12 → aait-2.3.12.2}/orangecontrib/AAIT/widgets/OWInputSelector.py +4 -1
  23. {aait-2.3.12 → aait-2.3.12.2}/orangecontrib/AAIT/widgets/OWLLMEngine.py +23 -8
  24. {aait-2.3.12 → aait-2.3.12.2}/orangecontrib/AAIT/widgets/OWLMStudio.py +200 -190
  25. aait-2.3.12.2/orangecontrib/AAIT/widgets/OWModel_Embeddings.py +88 -0
  26. {aait-2.3.12 → aait-2.3.12.2}/orangecontrib/AAIT/widgets/OWOperationSystem.py +41 -1
  27. aait-2.3.12.2/orangecontrib/AAIT/widgets/OWQuadrantclicker.py +88 -0
  28. {aait-2.3.12 → aait-2.3.12.2}/orangecontrib/AAIT/widgets/OWQueryLLM.py +1 -1
  29. {aait-2.3.12 → aait-2.3.12.2}/orangecontrib/AAIT/widgets/OWSelectColumnDynamique.py +1 -1
  30. {aait-2.3.12 → aait-2.3.12.2}/orangecontrib/AAIT/widgets/OWSelectRowsDynamic.py +19 -37
  31. {aait-2.3.12 → aait-2.3.12.2}/orangecontrib/AAIT/widgets/OWSortAndSelect.py +2 -2
  32. {aait-2.3.12 → aait-2.3.12.2}/orangecontrib/AAIT/widgets/designer/ow_OperationSystem.ui +25 -2
  33. {aait-2.3.12 → aait-2.3.12.2}/orangecontrib/AAIT/widgets/designer/ow_in_or_out_path.ui +13 -0
  34. aait-2.3.12.2/orangecontrib/AAIT/widgets/designer/owconversellm.ui +363 -0
  35. {aait-2.3.12 → aait-2.3.12.2}/orangecontrib/AAIT/widgets/designer/owedgellm.ui +1 -1
  36. {aait-2.3.12 → aait-2.3.12.2}/orangecontrib/AAIT/widgets/designer/owextracttokens.ui +51 -6
  37. {aait-2.3.12 → aait-2.3.12.2}/orangecontrib/AAIT/widgets/designer/owfilewithpath.ui +6 -6
  38. aait-2.3.12.2/orangecontrib/AAIT/widgets/designer/owquadrant_clicker.ui +276 -0
  39. {aait-2.3.12 → aait-2.3.12.2}/orangecontrib/AAIT/widgets/designer/owqueryllm.ui +1 -1
  40. aait-2.3.12.2/orangecontrib/AAIT/widgets/icons/CreateInstance.svg +108 -0
  41. aait-2.3.12.2/orangecontrib/AAIT/widgets/icons/Table.svg +86 -0
  42. aait-2.3.12.2/orangecontrib/AAIT/widgets/icons/owmodel_embeddings.svg +25 -0
  43. aait-2.3.12.2/orangecontrib/AAIT/widgets/icons/quadrantclicker.svg +135 -0
  44. {aait-2.3.12 → aait-2.3.12.2}/setup.py +1 -1
  45. aait-2.3.12/orangecontrib/AAIT/llm/prompt_management.py +0 -140
  46. aait-2.3.12/orangecontrib/AAIT/widgets/OWConverseLLM.py +0 -205
  47. aait-2.3.12/orangecontrib/AAIT/widgets/designer/owconversellm.ui +0 -126
  48. {aait-2.3.12 → aait-2.3.12.2}/License.txt +0 -0
  49. {aait-2.3.12 → aait-2.3.12.2}/aait.egg-info/dependency_links.txt +0 -0
  50. {aait-2.3.12 → aait-2.3.12.2}/aait.egg-info/entry_points.txt +0 -0
  51. {aait-2.3.12 → aait-2.3.12.2}/aait.egg-info/namespace_packages.txt +0 -0
  52. {aait-2.3.12 → aait-2.3.12.2}/aait.egg-info/requires.txt +0 -0
  53. {aait-2.3.12 → aait-2.3.12.2}/aait.egg-info/top_level.txt +0 -0
  54. {aait-2.3.12 → aait-2.3.12.2}/orangecontrib/AAIT/fix_torch/fix_torch_dll_error.py +0 -0
  55. {aait-2.3.12 → aait-2.3.12.2}/orangecontrib/AAIT/fix_torch/libomp140.x86_64.dll +0 -0
  56. {aait-2.3.12 → aait-2.3.12.2}/orangecontrib/AAIT/llm/__init__.py +0 -0
  57. {aait-2.3.12 → aait-2.3.12.2}/orangecontrib/AAIT/llm/answers.py +0 -0
  58. {aait-2.3.12 → aait-2.3.12.2}/orangecontrib/AAIT/llm/chunking.py +0 -0
  59. {aait-2.3.12 → aait-2.3.12.2}/orangecontrib/AAIT/llm/embeddings.py +0 -0
  60. {aait-2.3.12 → aait-2.3.12.2}/orangecontrib/AAIT/llm/lemmes.py +0 -0
  61. {aait-2.3.12 → aait-2.3.12.2}/orangecontrib/AAIT/llm/translations.py +0 -0
  62. {aait-2.3.12 → aait-2.3.12.2}/orangecontrib/AAIT/optimiser/__init__.py +0 -0
  63. {aait-2.3.12 → aait-2.3.12.2}/orangecontrib/AAIT/optimiser/optuna_multi.py +0 -0
  64. {aait-2.3.12 → aait-2.3.12.2}/orangecontrib/AAIT/utils/MetManagement.py +0 -0
  65. {aait-2.3.12 → aait-2.3.12.2}/orangecontrib/AAIT/utils/SimpleDialogQt.py +0 -0
  66. {aait-2.3.12 → aait-2.3.12.2}/orangecontrib/AAIT/utils/__init__.py +0 -0
  67. {aait-2.3.12 → aait-2.3.12.2}/orangecontrib/AAIT/utils/aait_repo_file.py +0 -0
  68. {aait-2.3.12 → aait-2.3.12.2}/orangecontrib/AAIT/utils/aait_table_viewer.py +0 -0
  69. {aait-2.3.12 → aait-2.3.12.2}/orangecontrib/AAIT/utils/base_widget.py +0 -0
  70. {aait-2.3.12 → aait-2.3.12.2}/orangecontrib/AAIT/utils/delta_local_shared_fodler.py +0 -0
  71. {aait-2.3.12 → aait-2.3.12.2}/orangecontrib/AAIT/utils/import_uic.py +0 -0
  72. {aait-2.3.12 → aait-2.3.12.2}/orangecontrib/AAIT/utils/initialize_from_ini.py +0 -0
  73. {aait-2.3.12 → aait-2.3.12.2}/orangecontrib/AAIT/utils/mac_utils.py +0 -0
  74. {aait-2.3.12 → aait-2.3.12.2}/orangecontrib/AAIT/utils/shared_variables.py +0 -0
  75. {aait-2.3.12 → aait-2.3.12.2}/orangecontrib/AAIT/utils/thread_management.py +0 -0
  76. {aait-2.3.12 → aait-2.3.12.2}/orangecontrib/AAIT/utils/tools/__init__.py +0 -0
  77. {aait-2.3.12 → aait-2.3.12.2}/orangecontrib/AAIT/utils/tools/change_owcorpus.py +0 -0
  78. {aait-2.3.12 → aait-2.3.12.2}/orangecontrib/AAIT/utils/tools/concat_splitted_pypi.py +0 -0
  79. {aait-2.3.12 → aait-2.3.12.2}/orangecontrib/AAIT/utils/tools/first_time_check.py +0 -0
  80. {aait-2.3.12 → aait-2.3.12.2}/orangecontrib/AAIT/utils/tools/owcorpus_ok.txt +0 -0
  81. {aait-2.3.12 → aait-2.3.12.2}/orangecontrib/AAIT/utils/unlink_table_domain.py +0 -0
  82. {aait-2.3.12 → aait-2.3.12.2}/orangecontrib/AAIT/utils/windows_utils.py +0 -0
  83. {aait-2.3.12 → aait-2.3.12.2}/orangecontrib/AAIT/widgets/OWAAITResourcesManager.py +0 -0
  84. {aait-2.3.12 → aait-2.3.12.2}/orangecontrib/AAIT/widgets/OWAccumulator.py +0 -0
  85. {aait-2.3.12 → aait-2.3.12.2}/orangecontrib/AAIT/widgets/OWApplyRules.py +0 -0
  86. {aait-2.3.12 → aait-2.3.12.2}/orangecontrib/AAIT/widgets/OWCN2rule_view.py +0 -0
  87. {aait-2.3.12 → aait-2.3.12.2}/orangecontrib/AAIT/widgets/OWChunking.py +0 -0
  88. {aait-2.3.12 → aait-2.3.12.2}/orangecontrib/AAIT/widgets/OWConcatRules.py +0 -0
  89. {aait-2.3.12 → aait-2.3.12.2}/orangecontrib/AAIT/widgets/OWCreateEmbeddings.py +0 -0
  90. {aait-2.3.12 → aait-2.3.12.2}/orangecontrib/AAIT/widgets/OWEditTable.py +0 -0
  91. {aait-2.3.12 → aait-2.3.12.2}/orangecontrib/AAIT/widgets/OWEmptySwitch.py +0 -0
  92. {aait-2.3.12 → aait-2.3.12.2}/orangecontrib/AAIT/widgets/OWEndLoop.py +0 -0
  93. {aait-2.3.12 → aait-2.3.12.2}/orangecontrib/AAIT/widgets/OWExecuteScript.py +0 -0
  94. {aait-2.3.12 → aait-2.3.12.2}/orangecontrib/AAIT/widgets/OWExtraChunks.py +0 -0
  95. {aait-2.3.12 → aait-2.3.12.2}/orangecontrib/AAIT/widgets/OWFileMetadata.py +0 -0
  96. {aait-2.3.12 → aait-2.3.12.2}/orangecontrib/AAIT/widgets/OWFileSyncChecker.py +0 -0
  97. {aait-2.3.12 → aait-2.3.12.2}/orangecontrib/AAIT/widgets/OWFindFilesFromDir.py +0 -0
  98. {aait-2.3.12 → aait-2.3.12.2}/orangecontrib/AAIT/widgets/OWGenerateQuestions.py +0 -0
  99. {aait-2.3.12 → aait-2.3.12.2}/orangecontrib/AAIT/widgets/OWGenerateSynthesis.py +0 -0
  100. {aait-2.3.12 → aait-2.3.12.2}/orangecontrib/AAIT/widgets/OWKeywords.py +0 -0
  101. {aait-2.3.12 → aait-2.3.12.2}/orangecontrib/AAIT/widgets/OWKeywordsDetection.py +0 -0
  102. {aait-2.3.12 → aait-2.3.12.2}/orangecontrib/AAIT/widgets/OWLanguageDetection.py +0 -0
  103. {aait-2.3.12 → aait-2.3.12.2}/orangecontrib/AAIT/widgets/OWLargeLanguageModel.py +0 -0
  104. {aait-2.3.12 → aait-2.3.12.2}/orangecontrib/AAIT/widgets/OWLemmatizer.py +0 -0
  105. {aait-2.3.12 → aait-2.3.12.2}/orangecontrib/AAIT/widgets/OWLoadDocuments.py +0 -0
  106. {aait-2.3.12 → aait-2.3.12.2}/orangecontrib/AAIT/widgets/OWModel_CE_MiniLML6.py +0 -0
  107. {aait-2.3.12 → aait-2.3.12.2}/orangecontrib/AAIT/widgets/OWModel_Falcon.py +0 -0
  108. {aait-2.3.12 → aait-2.3.12.2}/orangecontrib/AAIT/widgets/OWModel_HelsinkiEnFr.py +0 -0
  109. {aait-2.3.12 → aait-2.3.12.2}/orangecontrib/AAIT/widgets/OWModel_HelsinkiFrEn.py +0 -0
  110. {aait-2.3.12 → aait-2.3.12.2}/orangecontrib/AAIT/widgets/OWModel_MPNET.py +0 -0
  111. {aait-2.3.12 → aait-2.3.12.2}/orangecontrib/AAIT/widgets/OWModel_Mistral.py +0 -0
  112. {aait-2.3.12 → aait-2.3.12.2}/orangecontrib/AAIT/widgets/OWModel_Qwen.py +0 -0
  113. {aait-2.3.12 → aait-2.3.12.2}/orangecontrib/AAIT/widgets/OWModel_Qwen1B5_Q6.py +0 -0
  114. {aait-2.3.12 → aait-2.3.12.2}/orangecontrib/AAIT/widgets/OWModel_Qwen2_5_32B.py +0 -0
  115. {aait-2.3.12 → aait-2.3.12.2}/orangecontrib/AAIT/widgets/OWModel_Qwen3B_Q4.py +0 -0
  116. {aait-2.3.12 → aait-2.3.12.2}/orangecontrib/AAIT/widgets/OWModel_Qwen7B_Q4.py +0 -0
  117. {aait-2.3.12 → aait-2.3.12.2}/orangecontrib/AAIT/widgets/OWModel_Qwen7B_Q6.py +0 -0
  118. {aait-2.3.12 → aait-2.3.12.2}/orangecontrib/AAIT/widgets/OWModel_Solar.py +0 -0
  119. {aait-2.3.12 → aait-2.3.12.2}/orangecontrib/AAIT/widgets/OWModel_SolarUncensored.py +0 -0
  120. {aait-2.3.12 → aait-2.3.12.2}/orangecontrib/AAIT/widgets/OWModel_SpacyMD_EN.py +0 -0
  121. {aait-2.3.12 → aait-2.3.12.2}/orangecontrib/AAIT/widgets/OWModel_SpacyMD_FR.py +0 -0
  122. {aait-2.3.12 → aait-2.3.12.2}/orangecontrib/AAIT/widgets/OWOptimisation.py +0 -0
  123. {aait-2.3.12 → aait-2.3.12.2}/orangecontrib/AAIT/widgets/OWOptimisationSelection.py +0 -0
  124. {aait-2.3.12 → aait-2.3.12.2}/orangecontrib/AAIT/widgets/OWOptimisationSendScore.py +0 -0
  125. {aait-2.3.12 → aait-2.3.12.2}/orangecontrib/AAIT/widgets/OWProcessDocumentsFromPath.py +0 -0
  126. {aait-2.3.12 → aait-2.3.12.2}/orangecontrib/AAIT/widgets/OWRandomData.py +0 -0
  127. {aait-2.3.12 → aait-2.3.12.2}/orangecontrib/AAIT/widgets/OWReranking.py +0 -0
  128. {aait-2.3.12 → aait-2.3.12.2}/orangecontrib/AAIT/widgets/OWSaveFilepathEntry.py +0 -0
  129. {aait-2.3.12 → aait-2.3.12.2}/orangecontrib/AAIT/widgets/OWStartLoop.py +0 -0
  130. {aait-2.3.12 → aait-2.3.12.2}/orangecontrib/AAIT/widgets/OWTable2Corpus.py +0 -0
  131. {aait-2.3.12 → aait-2.3.12.2}/orangecontrib/AAIT/widgets/OWTranslation.py +0 -0
  132. {aait-2.3.12 → aait-2.3.12.2}/orangecontrib/AAIT/widgets/OWTrigger.py +0 -0
  133. {aait-2.3.12 → aait-2.3.12.2}/orangecontrib/AAIT/widgets/OWUnzipFolder.py +0 -0
  134. {aait-2.3.12 → aait-2.3.12.2}/orangecontrib/AAIT/widgets/POW_Wfactory.py +0 -0
  135. {aait-2.3.12 → aait-2.3.12.2}/orangecontrib/AAIT/widgets/__init__.py +0 -0
  136. {aait-2.3.12 → aait-2.3.12.2}/orangecontrib/AAIT/widgets/designer/owMarkdown.ui +0 -0
  137. {aait-2.3.12 → aait-2.3.12.2}/orangecontrib/AAIT/widgets/designer/ow_widget_random_data.ui +0 -0
  138. {aait-2.3.12 → aait-2.3.12.2}/orangecontrib/AAIT/widgets/designer/owaccumulator.ui +0 -0
  139. {aait-2.3.12 → aait-2.3.12.2}/orangecontrib/AAIT/widgets/designer/owaddcolumns.ui +0 -0
  140. {aait-2.3.12 → aait-2.3.12.2}/orangecontrib/AAIT/widgets/designer/owapplyrules.ui +0 -0
  141. {aait-2.3.12 → aait-2.3.12.2}/orangecontrib/AAIT/widgets/designer/owchunking.ui +0 -0
  142. {aait-2.3.12 → aait-2.3.12.2}/orangecontrib/AAIT/widgets/designer/owconcatrules.ui +0 -0
  143. {aait-2.3.12 → aait-2.3.12.2}/orangecontrib/AAIT/widgets/designer/owembeddings.ui +0 -0
  144. {aait-2.3.12 → aait-2.3.12.2}/orangecontrib/AAIT/widgets/designer/owemptyswitch.ui +0 -0
  145. {aait-2.3.12 → aait-2.3.12.2}/orangecontrib/AAIT/widgets/designer/owendloop.ui +0 -0
  146. {aait-2.3.12 → aait-2.3.12.2}/orangecontrib/AAIT/widgets/designer/owexecutescript.ui +0 -0
  147. {aait-2.3.12 → aait-2.3.12.2}/orangecontrib/AAIT/widgets/designer/owexecutescript_TEST.ui +0 -0
  148. {aait-2.3.12 → aait-2.3.12.2}/orangecontrib/AAIT/widgets/designer/owextrachunks.ui +0 -0
  149. {aait-2.3.12 → aait-2.3.12.2}/orangecontrib/AAIT/widgets/designer/owfilemetadata.ui +0 -0
  150. {aait-2.3.12 → aait-2.3.12.2}/orangecontrib/AAIT/widgets/designer/owfilesyncchecker.ui +0 -0
  151. {aait-2.3.12 → aait-2.3.12.2}/orangecontrib/AAIT/widgets/designer/owfindfilesfromdir.ui +0 -0
  152. {aait-2.3.12 → aait-2.3.12.2}/orangecontrib/AAIT/widgets/designer/owfusion_nm.ui +0 -0
  153. {aait-2.3.12 → aait-2.3.12.2}/orangecontrib/AAIT/widgets/designer/owgenerate_word.ui +0 -0
  154. {aait-2.3.12 → aait-2.3.12.2}/orangecontrib/AAIT/widgets/designer/owgeneratequestions.ui +0 -0
  155. {aait-2.3.12 → aait-2.3.12.2}/orangecontrib/AAIT/widgets/designer/owgeneratesynthesis.ui +0 -0
  156. {aait-2.3.12 → aait-2.3.12.2}/orangecontrib/AAIT/widgets/designer/owkeyword.ui +0 -0
  157. {aait-2.3.12 → aait-2.3.12.2}/orangecontrib/AAIT/widgets/designer/owkeywordsdetection.ui +0 -0
  158. {aait-2.3.12 → aait-2.3.12.2}/orangecontrib/AAIT/widgets/designer/owlangdetect.ui +0 -0
  159. {aait-2.3.12 → aait-2.3.12.2}/orangecontrib/AAIT/widgets/designer/owlargelanguagemodel.ui +0 -0
  160. {aait-2.3.12 → aait-2.3.12.2}/orangecontrib/AAIT/widgets/designer/owlemmatizer.ui +0 -0
  161. {aait-2.3.12 → aait-2.3.12.2}/orangecontrib/AAIT/widgets/designer/owllm4all.ui +0 -0
  162. {aait-2.3.12 → aait-2.3.12.2}/orangecontrib/AAIT/widgets/designer/owloaddocuments.ui +0 -0
  163. {aait-2.3.12 → aait-2.3.12.2}/orangecontrib/AAIT/widgets/designer/owloadworkflow.ui +0 -0
  164. {aait-2.3.12 → aait-2.3.12.2}/orangecontrib/AAIT/widgets/designer/owmodel_ce_minilml6.ui +0 -0
  165. {aait-2.3.12 → aait-2.3.12.2}/orangecontrib/AAIT/widgets/designer/owmodel_falcon.ui +0 -0
  166. {aait-2.3.12 → aait-2.3.12.2}/orangecontrib/AAIT/widgets/designer/owmodel_helsinki_en_fr.ui +0 -0
  167. {aait-2.3.12 → aait-2.3.12.2}/orangecontrib/AAIT/widgets/designer/owmodel_helsinki_fr_en.ui +0 -0
  168. {aait-2.3.12 → aait-2.3.12.2}/orangecontrib/AAIT/widgets/designer/owmodel_mistral.ui +0 -0
  169. {aait-2.3.12 → aait-2.3.12.2}/orangecontrib/AAIT/widgets/designer/owmodel_mpnet.ui +0 -0
  170. {aait-2.3.12 → aait-2.3.12.2}/orangecontrib/AAIT/widgets/designer/owmodel_qwen.ui +0 -0
  171. {aait-2.3.12 → aait-2.3.12.2}/orangecontrib/AAIT/widgets/designer/owmodel_qwen_2.5_32b.ui +0 -0
  172. {aait-2.3.12 → aait-2.3.12.2}/orangecontrib/AAIT/widgets/designer/owmodel_qwen_instruct_1.5b_q6.ui +0 -0
  173. {aait-2.3.12 → aait-2.3.12.2}/orangecontrib/AAIT/widgets/designer/owmodel_qwen_instruct_3b_q4.ui +0 -0
  174. {aait-2.3.12 → aait-2.3.12.2}/orangecontrib/AAIT/widgets/designer/owmodel_qwen_instruct_7b_q4.ui +0 -0
  175. {aait-2.3.12 → aait-2.3.12.2}/orangecontrib/AAIT/widgets/designer/owmodel_qwen_instruct_7b_q6.ui +0 -0
  176. {aait-2.3.12 → aait-2.3.12.2}/orangecontrib/AAIT/widgets/designer/owmodel_qwencoder_7b.ui +0 -0
  177. {aait-2.3.12 → aait-2.3.12.2}/orangecontrib/AAIT/widgets/designer/owmodel_solar.ui +0 -0
  178. {aait-2.3.12 → aait-2.3.12.2}/orangecontrib/AAIT/widgets/designer/owmodel_solar_uncensored.ui +0 -0
  179. {aait-2.3.12 → aait-2.3.12.2}/orangecontrib/AAIT/widgets/designer/owmodel_spacymd_en.ui +0 -0
  180. {aait-2.3.12 → aait-2.3.12.2}/orangecontrib/AAIT/widgets/designer/owmodel_spacymd_fr.ui +0 -0
  181. {aait-2.3.12 → aait-2.3.12.2}/orangecontrib/AAIT/widgets/designer/ownumberpointinrules.ui +0 -0
  182. {aait-2.3.12 → aait-2.3.12.2}/orangecontrib/AAIT/widgets/designer/owprocessdocuments.ui +0 -0
  183. {aait-2.3.12 → aait-2.3.12.2}/orangecontrib/AAIT/widgets/designer/owreranking.ui +0 -0
  184. {aait-2.3.12 → aait-2.3.12.2}/orangecontrib/AAIT/widgets/designer/owsavewithpath.ui +0 -0
  185. {aait-2.3.12 → aait-2.3.12.2}/orangecontrib/AAIT/widgets/designer/owselect_column_dynamic.ui +0 -0
  186. {aait-2.3.12 → aait-2.3.12.2}/orangecontrib/AAIT/widgets/designer/owselect_row_dynamic.ui +0 -0
  187. {aait-2.3.12 → aait-2.3.12.2}/orangecontrib/AAIT/widgets/designer/owsortandselect.ui +0 -0
  188. {aait-2.3.12 → aait-2.3.12.2}/orangecontrib/AAIT/widgets/designer/owstartloop.ui +0 -0
  189. {aait-2.3.12 → aait-2.3.12.2}/orangecontrib/AAIT/widgets/designer/owtable2corpus.ui +0 -0
  190. {aait-2.3.12 → aait-2.3.12.2}/orangecontrib/AAIT/widgets/designer/owtranslation.ui +0 -0
  191. {aait-2.3.12 → aait-2.3.12.2}/orangecontrib/AAIT/widgets/designer/owtrigger.ui +0 -0
  192. {aait-2.3.12 → aait-2.3.12.2}/orangecontrib/AAIT/widgets/designer/owunzipfolder.ui +0 -0
  193. {aait-2.3.12 → aait-2.3.12.2}/orangecontrib/AAIT/widgets/icons/CN2RuleViewer.svg +0 -0
  194. {aait-2.3.12 → aait-2.3.12.2}/orangecontrib/AAIT/widgets/icons/MDViewer.png +0 -0
  195. {aait-2.3.12 → aait-2.3.12.2}/orangecontrib/AAIT/widgets/icons/Mistral.png +0 -0
  196. {aait-2.3.12 → aait-2.3.12.2}/orangecontrib/AAIT/widgets/icons/apply_rules.svg +0 -0
  197. {aait-2.3.12 → aait-2.3.12.2}/orangecontrib/AAIT/widgets/icons/blue_down_arrow.svg +0 -0
  198. {aait-2.3.12 → aait-2.3.12.2}/orangecontrib/AAIT/widgets/icons/category.svg +0 -0
  199. {aait-2.3.12 → aait-2.3.12.2}/orangecontrib/AAIT/widgets/icons/dark_green.txt +0 -0
  200. {aait-2.3.12 → aait-2.3.12.2}/orangecontrib/AAIT/widgets/icons/de.png +0 -0
  201. {aait-2.3.12 → aait-2.3.12.2}/orangecontrib/AAIT/widgets/icons/document_generator.png +0 -0
  202. {aait-2.3.12 → aait-2.3.12.2}/orangecontrib/AAIT/widgets/icons/documents.png +0 -0
  203. {aait-2.3.12 → aait-2.3.12.2}/orangecontrib/AAIT/widgets/icons/edge_llm.svg +0 -0
  204. {aait-2.3.12 → aait-2.3.12.2}/orangecontrib/AAIT/widgets/icons/endloop.png +0 -0
  205. {aait-2.3.12 → aait-2.3.12.2}/orangecontrib/AAIT/widgets/icons/extra_chunks.png +0 -0
  206. {aait-2.3.12 → aait-2.3.12.2}/orangecontrib/AAIT/widgets/icons/green_check.svg +0 -0
  207. {aait-2.3.12 → aait-2.3.12.2}/orangecontrib/AAIT/widgets/icons/in_or_out.png +0 -0
  208. {aait-2.3.12 → aait-2.3.12.2}/orangecontrib/AAIT/widgets/icons/input.png +0 -0
  209. {aait-2.3.12 → aait-2.3.12.2}/orangecontrib/AAIT/widgets/icons/languages.png +0 -0
  210. {aait-2.3.12 → aait-2.3.12.2}/orangecontrib/AAIT/widgets/icons/lm_studio.png +0 -0
  211. {aait-2.3.12 → aait-2.3.12.2}/orangecontrib/AAIT/widgets/icons/local_interf_img_multi_pull.svg +0 -0
  212. {aait-2.3.12 → aait-2.3.12.2}/orangecontrib/AAIT/widgets/icons/local_interf_multi_pull.svg +0 -0
  213. {aait-2.3.12 → aait-2.3.12.2}/orangecontrib/AAIT/widgets/icons/local_interf_pull.svg +0 -0
  214. {aait-2.3.12 → aait-2.3.12.2}/orangecontrib/AAIT/widgets/icons/local_interf_push.svg +0 -0
  215. {aait-2.3.12 → aait-2.3.12.2}/orangecontrib/AAIT/widgets/icons/local_interf_text_pull.svg +0 -0
  216. {aait-2.3.12 → aait-2.3.12.2}/orangecontrib/AAIT/widgets/icons/logo_solar.svg +0 -0
  217. {aait-2.3.12 → aait-2.3.12.2}/orangecontrib/AAIT/widgets/icons/logo_uncensoredsolar.svg +0 -0
  218. {aait-2.3.12 → aait-2.3.12.2}/orangecontrib/AAIT/widgets/icons/logo_upload.png +0 -0
  219. {aait-2.3.12 → aait-2.3.12.2}/orangecontrib/AAIT/widgets/icons/models.png +0 -0
  220. {aait-2.3.12 → aait-2.3.12.2}/orangecontrib/AAIT/widgets/icons/operationSystem.png +0 -0
  221. {aait-2.3.12 → aait-2.3.12.2}/orangecontrib/AAIT/widgets/icons/optimisation.png +0 -0
  222. {aait-2.3.12 → aait-2.3.12.2}/orangecontrib/AAIT/widgets/icons/optimizer.png +0 -0
  223. {aait-2.3.12 → aait-2.3.12.2}/orangecontrib/AAIT/widgets/icons/output.png +0 -0
  224. {aait-2.3.12 → aait-2.3.12.2}/orangecontrib/AAIT/widgets/icons/owCN2_intersect_rules.svg +0 -0
  225. {aait-2.3.12 → aait-2.3.12.2}/orangecontrib/AAIT/widgets/icons/owaccumulator.png +0 -0
  226. {aait-2.3.12 → aait-2.3.12.2}/orangecontrib/AAIT/widgets/icons/owchunking.png +0 -0
  227. {aait-2.3.12 → aait-2.3.12.2}/orangecontrib/AAIT/widgets/icons/owconversellm.svg +0 -0
  228. {aait-2.3.12 → aait-2.3.12.2}/orangecontrib/AAIT/widgets/icons/owedittable.svg +0 -0
  229. {aait-2.3.12 → aait-2.3.12.2}/orangecontrib/AAIT/widgets/icons/owembeddings.svg +0 -0
  230. {aait-2.3.12 → aait-2.3.12.2}/orangecontrib/AAIT/widgets/icons/owemptyswitch.svg +0 -0
  231. {aait-2.3.12 → aait-2.3.12.2}/orangecontrib/AAIT/widgets/icons/owenvinfo.png +0 -0
  232. {aait-2.3.12 → aait-2.3.12.2}/orangecontrib/AAIT/widgets/icons/owexecutescript.svg +0 -0
  233. {aait-2.3.12 → aait-2.3.12.2}/orangecontrib/AAIT/widgets/icons/owextracttokens.svg +0 -0
  234. {aait-2.3.12 → aait-2.3.12.2}/orangecontrib/AAIT/widgets/icons/owfilemetadata.svg +0 -0
  235. {aait-2.3.12 → aait-2.3.12.2}/orangecontrib/AAIT/widgets/icons/owfilesfromdir.svg +0 -0
  236. {aait-2.3.12 → aait-2.3.12.2}/orangecontrib/AAIT/widgets/icons/owfilesyncchecker.svg +0 -0
  237. {aait-2.3.12 → aait-2.3.12.2}/orangecontrib/AAIT/widgets/icons/owfilewithpath.svg +0 -0
  238. {aait-2.3.12 → aait-2.3.12.2}/orangecontrib/AAIT/widgets/icons/owfusion_nm.png +0 -0
  239. {aait-2.3.12 → aait-2.3.12.2}/orangecontrib/AAIT/widgets/icons/owgeneratequestions.svg +0 -0
  240. {aait-2.3.12 → aait-2.3.12.2}/orangecontrib/AAIT/widgets/icons/owgeneratesynthesis.png +0 -0
  241. {aait-2.3.12 → aait-2.3.12.2}/orangecontrib/AAIT/widgets/icons/owkeywords.png +0 -0
  242. {aait-2.3.12 → aait-2.3.12.2}/orangecontrib/AAIT/widgets/icons/owkeywordsdetection.png +0 -0
  243. {aait-2.3.12 → aait-2.3.12.2}/orangecontrib/AAIT/widgets/icons/owlargelanguagemodel.svg +0 -0
  244. {aait-2.3.12 → aait-2.3.12.2}/orangecontrib/AAIT/widgets/icons/owlemmatizer.svg +0 -0
  245. {aait-2.3.12 → aait-2.3.12.2}/orangecontrib/AAIT/widgets/icons/owloaddocuments.svg +0 -0
  246. {aait-2.3.12 → aait-2.3.12.2}/orangecontrib/AAIT/widgets/icons/owmodel_ce_minilml6.svg +0 -0
  247. {aait-2.3.12 → aait-2.3.12.2}/orangecontrib/AAIT/widgets/icons/owmodel_falcon.png +0 -0
  248. {aait-2.3.12 → aait-2.3.12.2}/orangecontrib/AAIT/widgets/icons/owmodel_helsinki_en_fr.svg +0 -0
  249. {aait-2.3.12 → aait-2.3.12.2}/orangecontrib/AAIT/widgets/icons/owmodel_helsinki_fr_en.svg +0 -0
  250. {aait-2.3.12 → aait-2.3.12.2}/orangecontrib/AAIT/widgets/icons/owmodel_mpnet.svg +0 -0
  251. {aait-2.3.12 → aait-2.3.12.2}/orangecontrib/AAIT/widgets/icons/owmodel_spacymd_en.svg +0 -0
  252. {aait-2.3.12 → aait-2.3.12.2}/orangecontrib/AAIT/widgets/icons/owmodel_spacymd_fr.svg +0 -0
  253. {aait-2.3.12 → aait-2.3.12.2}/orangecontrib/AAIT/widgets/icons/owoptimisation.svg +0 -0
  254. {aait-2.3.12 → aait-2.3.12.2}/orangecontrib/AAIT/widgets/icons/owoptimisationselection.png +0 -0
  255. {aait-2.3.12 → aait-2.3.12.2}/orangecontrib/AAIT/widgets/icons/owqueryllm.svg +0 -0
  256. {aait-2.3.12 → aait-2.3.12.2}/orangecontrib/AAIT/widgets/icons/owreranking.svg +0 -0
  257. {aait-2.3.12 → aait-2.3.12.2}/orangecontrib/AAIT/widgets/icons/owsavefilepathentry.svg +0 -0
  258. {aait-2.3.12 → aait-2.3.12.2}/orangecontrib/AAIT/widgets/icons/owselectcolumndynamique.png +0 -0
  259. {aait-2.3.12 → aait-2.3.12.2}/orangecontrib/AAIT/widgets/icons/owsortandselect.svg +0 -0
  260. {aait-2.3.12 → aait-2.3.12.2}/orangecontrib/AAIT/widgets/icons/owtable2corpus.svg +0 -0
  261. {aait-2.3.12 → aait-2.3.12.2}/orangecontrib/AAIT/widgets/icons/owtranslation.svg +0 -0
  262. {aait-2.3.12 → aait-2.3.12.2}/orangecontrib/AAIT/widgets/icons/owtrigger.svg +0 -0
  263. {aait-2.3.12 → aait-2.3.12.2}/orangecontrib/AAIT/widgets/icons/processdocuments.svg +0 -0
  264. {aait-2.3.12 → aait-2.3.12.2}/orangecontrib/AAIT/widgets/icons/qwen-color.png +0 -0
  265. {aait-2.3.12 → aait-2.3.12.2}/orangecontrib/AAIT/widgets/icons/select_dynamic_row.png +0 -0
  266. {aait-2.3.12 → aait-2.3.12.2}/orangecontrib/AAIT/widgets/icons/startloop.png +0 -0
  267. {aait-2.3.12 → aait-2.3.12.2}/orangecontrib/AAIT/widgets/icons/tools.png +0 -0
  268. {aait-2.3.12 → aait-2.3.12.2}/orangecontrib/AAIT/widgets/icons/widgetFactory.svg +0 -0
  269. {aait-2.3.12 → aait-2.3.12.2}/orangecontrib/AAIT/widgets/icons/zip.svg +0 -0
  270. {aait-2.3.12 → aait-2.3.12.2}/orangecontrib/__init__.py +0 -0
  271. {aait-2.3.12 → aait-2.3.12.2}/setup.cfg +0 -0

1. {aait-2.3.12 → aait-2.3.12.2}/PKG-INFO +1 -1
@@ -1,6 +1,6 @@
  Metadata-Version: 2.1
  Name: aait
- Version: 2.3.12
+ Version: 2.3.12.2
  Summary: Advanced Artificial Intelligence Tools is a package meant to develop and enable advanced AI functionalities in Orange
  Home-page:
  Author: Orange community

2. {aait-2.3.12 → aait-2.3.12.2}/aait.egg-info/PKG-INFO +1 -1
@@ -1,6 +1,6 @@
  Metadata-Version: 2.1
  Name: aait
- Version: 2.3.12
+ Version: 2.3.12.2
  Summary: Advanced Artificial Intelligence Tools is a package meant to develop and enable advanced AI functionalities in Orange
  Home-page:
  Author: Orange community

3. {aait-2.3.12 → aait-2.3.12.2}/aait.egg-info/SOURCES.txt +10 -1
@@ -1,5 +1,4 @@
  License.txt
- setup.cfg
  setup.py
  aait.egg-info/PKG-INFO
  aait.egg-info/SOURCES.txt
@@ -40,6 +39,7 @@ orangecontrib/AAIT/utils/shared_variables.py
  orangecontrib/AAIT/utils/subprocess_management.py
  orangecontrib/AAIT/utils/thread_management.py
  orangecontrib/AAIT/utils/unlink_table_domain.py
+ orangecontrib/AAIT/utils/widget_positioning.py
  orangecontrib/AAIT/utils/windows_utils.py
  orangecontrib/AAIT/utils/tools/__init__.py
  orangecontrib/AAIT/utils/tools/change_owcorpus.py
@@ -50,6 +50,8 @@ orangecontrib/AAIT/widgets/OWAAITResourcesManager.py
  orangecontrib/AAIT/widgets/OWAccumulator.py
  orangecontrib/AAIT/widgets/OWAddColumns.py
  orangecontrib/AAIT/widgets/OWApplyRules.py
+ orangecontrib/AAIT/widgets/OWAutoShowCreateInstance.py
+ orangecontrib/AAIT/widgets/OWAutoShowTable.py
  orangecontrib/AAIT/widgets/OWCN2rule_view.py
  orangecontrib/AAIT/widgets/OWChunking.py
  orangecontrib/AAIT/widgets/OWConcatRules.py
@@ -80,6 +82,7 @@ orangecontrib/AAIT/widgets/OWLargeLanguageModel.py
  orangecontrib/AAIT/widgets/OWLemmatizer.py
  orangecontrib/AAIT/widgets/OWLoadDocuments.py
  orangecontrib/AAIT/widgets/OWModel_CE_MiniLML6.py
+ orangecontrib/AAIT/widgets/OWModel_Embeddings.py
  orangecontrib/AAIT/widgets/OWModel_Falcon.py
  orangecontrib/AAIT/widgets/OWModel_HelsinkiEnFr.py
  orangecontrib/AAIT/widgets/OWModel_HelsinkiFrEn.py
@@ -100,6 +103,7 @@ orangecontrib/AAIT/widgets/OWOptimisation.py
  orangecontrib/AAIT/widgets/OWOptimisationSelection.py
  orangecontrib/AAIT/widgets/OWOptimisationSendScore.py
  orangecontrib/AAIT/widgets/OWProcessDocumentsFromPath.py
+ orangecontrib/AAIT/widgets/OWQuadrantclicker.py
  orangecontrib/AAIT/widgets/OWQueryLLM.py
  orangecontrib/AAIT/widgets/OWRandomData.py
  orangecontrib/AAIT/widgets/OWReranking.py
@@ -167,6 +171,7 @@ orangecontrib/AAIT/widgets/designer/owmodel_spacymd_en.ui
  orangecontrib/AAIT/widgets/designer/owmodel_spacymd_fr.ui
  orangecontrib/AAIT/widgets/designer/ownumberpointinrules.ui
  orangecontrib/AAIT/widgets/designer/owprocessdocuments.ui
+ orangecontrib/AAIT/widgets/designer/owquadrant_clicker.ui
  orangecontrib/AAIT/widgets/designer/owqueryllm.ui
  orangecontrib/AAIT/widgets/designer/owreranking.ui
  orangecontrib/AAIT/widgets/designer/owsavewithpath.ui
@@ -179,8 +184,10 @@ orangecontrib/AAIT/widgets/designer/owtranslation.ui
  orangecontrib/AAIT/widgets/designer/owtrigger.ui
  orangecontrib/AAIT/widgets/designer/owunzipfolder.ui
  orangecontrib/AAIT/widgets/icons/CN2RuleViewer.svg
+ orangecontrib/AAIT/widgets/icons/CreateInstance.svg
  orangecontrib/AAIT/widgets/icons/MDViewer.png
  orangecontrib/AAIT/widgets/icons/Mistral.png
+ orangecontrib/AAIT/widgets/icons/Table.svg
  orangecontrib/AAIT/widgets/icons/apply_rules.svg
  orangecontrib/AAIT/widgets/icons/blue_down_arrow.svg
  orangecontrib/AAIT/widgets/icons/category.svg
@@ -232,6 +239,7 @@ orangecontrib/AAIT/widgets/icons/owlargelanguagemodel.svg
  orangecontrib/AAIT/widgets/icons/owlemmatizer.svg
  orangecontrib/AAIT/widgets/icons/owloaddocuments.svg
  orangecontrib/AAIT/widgets/icons/owmodel_ce_minilml6.svg
+ orangecontrib/AAIT/widgets/icons/owmodel_embeddings.svg
  orangecontrib/AAIT/widgets/icons/owmodel_falcon.png
  orangecontrib/AAIT/widgets/icons/owmodel_helsinki_en_fr.svg
  orangecontrib/AAIT/widgets/icons/owmodel_helsinki_fr_en.svg
@@ -249,6 +257,7 @@ orangecontrib/AAIT/widgets/icons/owtable2corpus.svg
  orangecontrib/AAIT/widgets/icons/owtranslation.svg
  orangecontrib/AAIT/widgets/icons/owtrigger.svg
  orangecontrib/AAIT/widgets/icons/processdocuments.svg
+ orangecontrib/AAIT/widgets/icons/quadrantclicker.svg
  orangecontrib/AAIT/widgets/icons/qwen-color.png
  orangecontrib/AAIT/widgets/icons/select_dynamic_row.png
  orangecontrib/AAIT/widgets/icons/startloop.png

4. {aait-2.3.12 → aait-2.3.12.2}/orangecontrib/AAIT/__init__.py +22 -19
@@ -4,24 +4,24 @@ from packaging import version
  import sys
  import re
  import os
- def check_executable_path():
- exe = sys.executable
-
- # caractères spéciaux interdits (tu peux en ajouter)
- forbidden = r'[^A-Za-z0-9_\-./:\\]' # tout ce qui n'est PAS ce set
- pattern = re.compile(forbidden)
-
- if " " in exe or pattern.search(exe):
- return False
- return True
-
- if not check_executable_path():
- if "site-packages/Orange/widgets" in os.path.dirname(os.path.abspath(__file__)).replace("\\", "/"):
- from orangecontrib.AAIT.utils import SimpleDialogQt
- else:
- from orangecontrib.AAIT.utils import SimpleDialogQt
- SimpleDialogQt.BoxError("You must install this program in a path that does not contain spaces or special characters.")
- exit(0)
+ # def check_executable_path():
+ # exe = sys.executable
+ #
+ # # caractères spéciaux interdits (tu peux en ajouter)
+ # forbidden = r'[^A-Za-z0-9_\-./:\\]' # tout ce qui n'est PAS ce set
+ # pattern = re.compile(forbidden)
+ #
+ # if " " in exe or pattern.search(exe):
+ # return False
+ # return True
+ #
+ # if not check_executable_path():
+ # if "site-packages/Orange/widgets" in os.path.dirname(os.path.abspath(__file__)).replace("\\", "/"):
+ # from orangecontrib.AAIT.utils import SimpleDialogQt
+ # else:
+ # from orangecontrib.AAIT.utils import SimpleDialogQt
+ # SimpleDialogQt.BoxError("You must install this program in a path that does not contain spaces or special characters.")
+ # exit(0)

  def check_executable_length(max_length=260):
  exe = sys.executable
@@ -93,7 +93,10 @@ else: # Execute the file
  """
  gc.collect()
  objects = gc.get_objects()
- widget_discoveries = [obj for obj in objects if isinstance(obj, WidgetDiscovery)]
+ try:
+ widget_discoveries = [obj for obj in objects if isinstance(obj, WidgetDiscovery)]
+ except Exception as e:
+ print(e)
  return widget_discoveries

  def import_proprietary_categories():
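
The second hunk above guards the gc-based lookup of live WidgetDiscovery instances with a try/except. Below is a minimal, self-contained sketch (not part of the diff) of the underlying technique of scanning gc.get_objects() for instances of a class; WidgetDiscoveryStub is a stand-in, since Orange's WidgetDiscovery is not imported here.

import gc

class WidgetDiscoveryStub:
    """Stand-in for Orange's WidgetDiscovery, used only for this sketch."""
    pass

probe = WidgetDiscoveryStub()
gc.collect()
try:
    found = [obj for obj in gc.get_objects() if isinstance(obj, WidgetDiscoveryStub)]
except Exception as e:  # mirrors the defensive guard added in 2.3.12.2
    print(e)
    found = []
print(len(found) >= 1)  # True: the live instance is visible to the scan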

5. {aait-2.3.12 → aait-2.3.12.2}/orangecontrib/AAIT/llm/answers_llama.py +158 -68
@@ -97,6 +97,14 @@ def check_gpu(model_path, argself):
  return


+ def count_tokens(model: Llama, text: str) -> int:
+ """
+ Count the number of tokens in a text, for a chosen model.
+ """
+ tokens = model.tokenize(text.encode("utf-8"))
+ return len(tokens)
+
+
  def load_model(model_path, use_gpu, n_ctx=10000):
  """
  Charge un modèle GGUF avec llama_cpp.Llama.
@@ -126,7 +134,7 @@ def load_model(model_path, use_gpu, n_ctx=10000):
  use_mmap=True,
  use_mlock=False,
  embedding=False,
- verbose=False,
+ verbose=True, #hahahahahaha remettre a False
  )
  return model
  except Exception as e:
@@ -146,7 +154,8 @@ def generate_answers(table, model_path, use_gpu=False, n_ctx=4096, query_paramet

  # Chargement modèle (llama_cpp)
  model = load_model(model_path=model_path, use_gpu=use_gpu, n_ctx=n_ctx)
-
+ if model is None:
+ return None
  # Paramètres de génération par défaut
  if query_parameters is None:
  query_parameters = {"max_tokens": 4096, "temperature": 0.4, "top_p": 0.4, "top_k": 40, "repeat_penalty": 1.15}
@@ -170,6 +179,8 @@ def generate_answers(table, model_path, use_gpu=False, n_ctx=4096, query_paramet
  system_prompt=system_prompt
  )

+ prompt = handle_context_length(prompt, model, n_ctx, method="truncate", progress_callback=progress_callback)
+
  answer = run_query(
  prompt,
  model=model,
@@ -185,8 +196,8 @@ def generate_answers(table, model_path, use_gpu=False, n_ctx=4096, query_paramet

  if answer == "":
  answer = (
- "Error: The answer could not be generated. The model architecture you tried to use is most "
- f"likely not supported yet.\n\nModel name: {ntpath.basename(model_path)}"
+ "Error: The answer could not be generated. Your prompt might be too long, or the model architecture you tried to use is possibly "
+ f"not supported yet.\n\nModel name: {ntpath.basename(model_path)}"
  )

  thinking = ""
@@ -199,11 +210,11 @@ def generate_answers(table, model_path, use_gpu=False, n_ctx=4096, query_paramet

  if progress_callback is not None:
  progress_value = float(100 * (i + 1) / len(data))
- progress_callback((progress_value, "\n\n\n\n"))
+ progress_callback(("progressBar", progress_value))

  if argself is not None and getattr(argself, "stop", False):
  break
- except ValueError as e:
+ except Exception as e:
  print("An error occurred when trying to generate an answer:", e)
  return

@@ -278,7 +289,7 @@ def run_query(prompt, model, max_tokens=4096, temperature=0, top_p=0, top_k=40,
  # - On utilise create_completion (prompt-style) pour rester compatible avec ton templating actuel.
  # - Le générateur renvoie des chunks contenant choices[0].text.
  try:
- stream = model.create_completion(prompt=prompt, **gen_kwargs)
+ stream = model(prompt=prompt, **gen_kwargs)

  for chunk in stream:
  # Récupérer le texte incrémental
@@ -296,7 +307,7 @@ def run_query(prompt, model, max_tokens=4096, temperature=0, top_p=0, top_k=40,
  write_tokens_to_file(token, workflow_id)

  if progress_callback is not None:
- progress_callback((None, token))
+ progress_callback(("assistant", token))

  if argself is not None and getattr(argself, "stop", False):
  # Arrêt demandé de l'extérieur
@@ -314,63 +325,142 @@ def run_query(prompt, model, max_tokens=4096, temperature=0, top_p=0, top_k=40,
  return answer


- # def generate_conversation(table, model, model_path, conversation="", progress_callback=None, argself=None):
- # """
- # Generates a response using a language model and appends it to a conversation.
- #
- # Parameters:
- # ----------
- # table (Orange.data.Table) Input data table. The first row should contain at least a "prompt" column, and optionally
- # "system prompt" and "assistant prompt" columns for context.
- #
- # model (GPT4All) : Loaded language model instance, compatible with GPT4All or llama.cpp-style interfaces.
- #
- # model_path (str) : Path or name of the model, used for selecting the appropriate prompt template.
- #
- # conversation (str, optional) : Existing conversation string to append the new response to. Defaults to an empty string.
- #
- # progress_callback (callable, optional) : Callback function for UI updates during generation. Called with progress percentage and message.
- #
- # argself (object, optional) : Extra argument passed to `run_query`, typically the widget instance (used for context or settings).
- #
- # Returns:
- # -------
- # Orange.data.Table
- # A new Orange Table containing the original input row with two new meta columns:
- # - "Answer": the model's generated response.
- # - "Conversation": the updated full conversation string.
- # """
- # if table is None:
- # return
- #
- # # Copy of input data
- # data = copy.deepcopy(table)
- # attr_dom = list(data.domain.attributes)
- # metas_dom = list(data.domain.metas)
- # class_dom = list(data.domain.class_vars)
- #
- # features = list(data[0])
- # metas = list(data.metas[0])
- #
- # # Get data from the first row of the Data Table
- # system_prompt = data[0]["system prompt"].value if "system prompt" in data.domain else ""
- # assistant_prompt = data[0]["assistant prompt"].value if "assistant prompt" in data.domain else ""
- # user_prompt = data[0]["prompt"].value
- #
- # # Build prompt based on the model name
- # prompt = prompt_management.apply_prompt_template(model_path, user_prompt=user_prompt, assistant_prompt=assistant_prompt, system_prompt=system_prompt)
- # answer = run_query(prompt, model=model, argself=argself, progress_callback=progress_callback)
- # conversation += "### Assistant :\n\n" + answer + "\n\n\n\n"
- #
- # # Add spaces to the widget for following answers
- # progress_callback((100, "\n\n\n\n"))
- #
- # # Generate new Domain to add to data
- # metas += [answer, conversation]
- # row = [features + metas]
- # answer_dom = [StringVariable("Answer"), StringVariable("Conversation")]
- #
- # # Create and return table
- # domain = Domain(attributes=attr_dom, metas=metas_dom + answer_dom, class_vars=class_dom)
- # out_data = Table.from_list(domain=domain, rows=row)
- # return out_data
+ def generate_conversation(table, model, conversation=None, n_ctx=32768, progress_callback=None, argself=None):
+ """
+ Generates a response using a language model and appends it to a conversation.
+
+ Parameters:
+ ----------
+ table (Orange.data.Table) Input data table. The first row should contain at least a "prompt" column, and optionally
+ "system prompt" and "assistant prompt" columns for context.
+ model (Llama) : Loaded language model instance, compatible with GPT4All or llama.cpp-style interfaces.
+ widget (OWConverseLLM) : The widget in which to display the conversation.
+ conversation (list, optional) : Existing conversation string to append the new response to. Defaults to an empty string.
+ progress_callback (callable, optional) : Callback function for UI updates during generation. Called with progress percentage and message.
+ argself (object, optional) : Extra argument passed to `run_query`, typically the widget instance (used for context or settings).
+
+ Returns:
+ -------
+ Orange.data.Table
+ A new Orange Table containing the original input row with two new meta columns:
+ - "Answer": the model's generated response.
+ - "Conversation": the updated full conversation string.
+ """
+ if table is None:
+ return
+
+ # Copy the input data
+ data = copy.deepcopy(table)
+ L = len(data)
+
+ # Get data from the first row of the Data Table
+ system_prompt = data[0]["system prompt"].value if "system prompt" in data.domain else ""
+ user_prompt = data[0]["prompt"].value
+ assistant_prompt = data[0]["assistant prompt"].value if "assistant prompt" in data.domain else ""
+
+ # Get model path to determine prompt template
+ model_path = model.model_path
+
+ prompt = ""
+ # If the conversation has not started, add a default system prompt
+ if not conversation:
+ conversation = [{"role": "system", "content": system_prompt}]
+
+ # If the conversation has started, build the complete context
+ else:
+ for message in conversation:
+ if message["role"] == "system":
+ prompt += prompt_management.apply_system_template(model_path, message["content"])
+ elif message["role"] == "user":
+ prompt += prompt_management.apply_user_template(model_path, message["content"])
+ elif message["role"] == "assistant":
+ prompt += prompt_management.apply_assistant_template(model_path, message["content"])
+
+ # Add the current user prompt & assistant prompt
+ prompt += prompt_management.apply_user_template(model_path, user_prompt)
+ prompt += prompt_management.apply_assistant_template(model_path, assistant_prompt)
+
+ prompt = handle_context_length(prompt, model, n_ctx, method="truncate", progress_callback=progress_callback)
+
+ # Return progression to fill the user & assistant cards
+ progress_callback(("user", user_prompt))
+ progress_callback(("assistant", assistant_prompt))
+
+ # Append the user message to the conversation
+ conversation.append({"role": "user", "content": user_prompt})
+
+ # Generate the answer
+ answer = run_query(prompt, model=model, argself=argself, progress_callback=progress_callback)
+
+ # Split the thinking block and remove it from the answer
+ think_text, final_answer = split_think(answer=answer)
+
+ # Append the answer to the conversation
+ conversation.append({"role": "assistant", "content": assistant_prompt + final_answer})
+
+ # End of the progress bar
+ progress_callback(("progressBar", 100))
+
+ # Padding to add columns to the table, TEMPORARY # TODO : process the entire table for conversation
+ rows_answer = [final_answer]
+ rows_answer += [""] * (L - len(rows_answer))
+ rows_think = [think_text]
+ rows_think += [""] * (L - len(rows_think))
+ rows_conv = ["To be implemented"]
+ rows_conv += [""] * (L - len(rows_conv))
+
+ # Generate new Domain to add to data
+ var_answer = StringVariable("Answer")
+ var_conversation = StringVariable("Conversation")
+ var_think = StringVariable("Think")
+
+ # Add columns to the table
+ data = data.add_column(var_answer, rows_answer, to_metas=True)
+ data = data.add_column(var_conversation, rows_conv, to_metas=True)
+ data = data.add_column(var_think, rows_think, to_metas=True)
+
+ return data, conversation
+
+
+ def split_think(answer: str):
+ # Extract think content (if any)
+ think_match = re.search(r"<think>(.*?)</think>", answer, flags=re.DOTALL)
+ think_text = think_match.group(1).strip() if think_match else ""
+ # Remove think block from the final answer
+ final_answer = re.sub(r"<think>.*?</think>", "", answer, flags=re.DOTALL).strip()
+ return think_text, final_answer
+
+
+ def conversation_to_text(conversation):
+ if not conversation:
+ return ""
+ else:
+ pass #TODO
+
+
+ def handle_context_length(prompt, model, n_ctx, method="truncate", margin=0, progress_callback=None):
+ """
+ Truncate a prompt to fit within n_ctx tokens, leaving margin for generation.
+ Safely handles edge cases where limit <= 0.
+ """
+ # Keep a margin for generated tokens
+ limit = max(n_ctx - margin, 0) # clamp to at least 0
+
+ if method == "truncate":
+ tokens = model.tokenize(prompt.encode("utf-8")) # pass string, not bytes
+ initial_length = len(tokens)
+ if initial_length > limit:
+ # take last `limit` tokens safely
+ tokens = tokens[-limit:] if limit > 0 else []
+ truncated_length = len(tokens)
+ prompt = model.detokenize(tokens).decode("utf-8") if tokens else ""
+ if progress_callback:
+ warning = (
+ f"Complete prompt contains {initial_length} tokens - context limit is {limit}. "
+ f"The {truncated_length} last tokens have been kept in the prompt."
+ )
+ progress_callback(("warning", warning))
+ return prompt
+ else:
+ return prompt
+
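
The new handle_context_length() keeps only the last n_ctx - margin tokens of an over-long prompt before generation. A minimal, self-contained sketch of that keep-the-tail truncation logic follows (not part of the diff); a whitespace "tokenizer" stands in for the real llama_cpp.Llama tokenize/detokenize pair, so FakeModel and truncate_to_context are illustrative names only.

# Illustrative sketch: FakeModel mimics the tokenize()/detokenize() byte interface used above.
class FakeModel:
    def tokenize(self, data: bytes):
        return data.decode("utf-8").split()

    def detokenize(self, tokens):
        return " ".join(tokens).encode("utf-8")

def truncate_to_context(prompt: str, model, n_ctx: int, margin: int = 0) -> str:
    """Keep only the last (n_ctx - margin) tokens, mirroring handle_context_length."""
    limit = max(n_ctx - margin, 0)
    tokens = model.tokenize(prompt.encode("utf-8"))
    if len(tokens) <= limit:
        return prompt
    kept = tokens[-limit:] if limit > 0 else []
    return model.detokenize(kept).decode("utf-8") if kept else ""

if __name__ == "__main__":
    model = FakeModel()
    long_prompt = " ".join(f"tok{i}" for i in range(20))
    print(truncate_to_context(long_prompt, model, n_ctx=8, margin=3))  # keeps the last 5 "tokens"

Keeping the tail rather than the head preserves the most recent conversation turns, which is why the warning reports "The ... last tokens have been kept in the prompt."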

6. {aait-2.3.12 → aait-2.3.12.2}/orangecontrib/AAIT/llm/lmstudio.py +2 -2
@@ -47,7 +47,7 @@ def appel_lmstudio(prompt, model, stream=False, temperature=0.7, max_tokens=4096

  # Vérification de la réponse
  if response.status_code != 200:
- raise Exception("Erreur dans la requête")
+ raise Exception("Error in the request")

  # Initialisation de la variable de contenu
  content = ""
@@ -75,6 +75,6 @@ def appel_lmstudio(prompt, model, stream=False, temperature=0.7, max_tokens=4096
  return content

  except Exception as e:
- print(f"Erreur : {e}")
+ print(f"Error : {e}")
  return None


7. {aait-2.3.12 → aait-2.3.12.2}/orangecontrib/AAIT/llm/process_documents.py +2 -2
@@ -192,10 +192,10 @@ def extract_text(filepath):
  return extract_text_from_pdf(filepath)
  elif file_extension == ".docx":
  return extract_text_from_docx(filepath)
- elif file_extension in [".txt", ".md"]:
+ elif file_extension in [".txt", ".md", ".py", ".html", ".json", ".ows"]:
  return extract_text_from_txt(filepath)
  else:
- return "ERROR: Unsupported file format. Please use a .pdf, .docx, .txt, or .md file."
+ return "ERROR: Unsupported file format. Please use a .pdf, .docx, .txt, .md, .py, .html, .ows or .json file."
  except Exception as e:
  print(f"Erreur lors de l'extraction de texte depuis {filepath}: {e}")
  return f"ERROR: Extraction Error ({e})"
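
This hunk widens the extractor's extension dispatch so that .py, .html, .json and .ows files take the plain-text path. A tiny, self-contained sketch of that dispatch is below (not part of the diff); pick_extractor and the returned labels are illustrative stand-ins for the package's extract_text_from_* helpers.

import os

TEXT_LIKE = {".txt", ".md", ".py", ".html", ".json", ".ows"}

def pick_extractor(filepath: str) -> str:
    # Route a file to an extractor based on its extension, as extract_text() does.
    ext = os.path.splitext(filepath)[1].lower()
    if ext == ".pdf":
        return "pdf extractor"
    if ext == ".docx":
        return "docx extractor"
    if ext in TEXT_LIKE:
        return "plain-text extractor"
    return "unsupported"

print(pick_extractor("workflow.ows"))  # plain-text extractor (newly supported in 2.3.12.2)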

8. aait-2.3.12.2/orangecontrib/AAIT/llm/prompt_management.py +157 -0
@@ -0,0 +1,157 @@
+ import os
+
+ prompt_templates = {
+ "llama": {
+ "system": "<|begin_of_text|><|start_header_id|>system<|end_header_id|>\n{system_prompt}<|eot_id|>",
+ "user": "<|start_header_id|>user<|end_header_id|>\n{user_prompt}<|eot_id|>",
+ "assistant": "<|start_header_id|>assistant<|end_header_id|>\n{assistant_prompt}"
+ },
+
+ "mistral": {
+ "system": "{system_prompt}\n",
+ "user": "<s>[INST] {user_prompt} [/INST]</s>\n",
+ "assistant": "{assistant_prompt}"
+ },
+
+ "solar": {
+ "system": "{system_prompt}\n",
+ "user": "### User: {user_prompt}\n",
+ "assistant": "### Assistant: {assistant_prompt}"
+ },
+
+ "deepseek": {
+ "system": "{system_prompt}\n",
+ "user": "### Instruction: {user_prompt}\n",
+ "assistant": "### Response: {assistant_prompt}"
+ },
+
+ "qwen": {
+ "system": "<|im_start|>system\n{system_prompt}<|im_end|>\n",
+ "user": "<|im_start|>user\n{user_prompt}<|im_end|>\n",
+ "assistant": "<|im_start|>assistant\n{assistant_prompt}"
+ },
+
+ "gemma": {
+ "system": "{system_prompt}\n",
+ "user": "<start_of_turn>user\n{user_prompt}<end_of_turn>\n",
+ "assistant": "<start_of_turn>model\n{assistant_prompt}\n"
+ },
+
+ "granite": {
+ "system": "<|system|>\n{system_prompt}\n",
+ "user": "<|user|>\n{user_prompt}\n",
+ "assistant": "<|assistant|>\n{assistant_prompt}"
+ },
+
+ "phi": {
+ "system": "<|im_start|>system<|im_sep|>\n{system_prompt}<|im_end|>\n",
+ "user": "<|im_start|>user<|im_sep|>\n{user_prompt}<|im_end|>\n",
+ "assistant": "<|im_start|>assistant<|im_sep|>\n{assistant_prompt}"
+ },
+
+ "default": {
+ "system": "{system_prompt}",
+ "user": "{user_prompt}",
+ "assistant": "{assistant_prompt}"
+ }
+ }
+
+
+
+ stop_tokens = {
+ "llama": "<|eot_id|>",
+ "mistral": "</s>",
+ "qwen": "<|im_end|>",
+ "gemma": "<end_of_turn>",
+ "granite": "<|endoftext|>",
+ "phi": "<|im_end|>"
+ }
+
+
+ model_types = {
+ "solar-10.7b-instruct-v1.0.Q6_K.gguf": "solar",
+ "solar-10.7b-instruct-v1.0-uncensored.Q6_K.gguf": "solar",
+ "Mistral-7B-Instruct-v0.3.Q6_K.gguf": "mistral",
+ "Qwen2.5.1-Coder-7B-Instruct-Q6_K.gguf": "qwen",
+ "qwen2.5-3b-instruct-q4_k_m.gguf": "qwen",
+ "deepseek-coder-6.7b-instruct.Q6_K.gguf": "deepseek"
+ }
+
+ model_keywords = ["qwen", "solar", "mistral", "llama", "deepseek", "gemma", "granite", "phi"]
+
+
+
+ def get_model_type(model_path):
+ model_name = os.path.basename(model_path)
+ model_type = model_types.get(model_name)
+ if model_type is None:
+ model_type = next((keyword for keyword in model_keywords if keyword in model_name.lower()), None)
+ if model_type is None:
+ model_type = "default"
+ return model_type
+
+
+ def apply_prompt_template(model_path, user_prompt, assistant_prompt="", system_prompt=""):
+ """
+ Apply a prompt template based on the given model name and user input.
+
+ Parameters:
+ model_path (str): The name of the model used to determine its type.
+ user_prompt (str): The user input or request to embed into the prompt.
+ assistant_prompt (str, optional): The assistant's beginning of response, if any. Defaults to an empty string.
+ system_prompt (str, optional): A system-level instruction or context to include in the prompt. Defaults to an empty string.
+
+ Returns:
+ str: The formatted prompt that is ready to be passed to the model.
+ """
+ # Try to identify the model's type
+ model_type = get_model_type(model_path)
+ # Retrieve the template
+ template = prompt_templates.get(model_type, prompt_templates["default"]) # Default template if none found
+ # Apply the template
+ prompt = ""
+ if system_prompt is not None:
+ prompt += template["system"].format(system_prompt=system_prompt)
+ prompt += template["user"].format(user_prompt=user_prompt)
+ prompt += template["assistant"].format(assistant_prompt=assistant_prompt)
+ return prompt
+
+
+ def apply_system_template(model_path, system_prompt):
+ model_type = get_model_type(model_path)
+ template = prompt_templates.get(model_type, prompt_templates["default"])
+ return template["system"].format(system_prompt=system_prompt)
+
+ def apply_user_template(model_path, user_prompt):
+ model_type = get_model_type(model_path)
+ template = prompt_templates.get(model_type, prompt_templates["default"])
+ return template["user"].format(user_prompt=user_prompt)
+
+ def apply_assistant_template(model_path, assistant_prompt):
+ model_type = get_model_type(model_path)
+ template = prompt_templates.get(model_type, prompt_templates["default"])
+ return template["assistant"].format(assistant_prompt=assistant_prompt)
+
+
+ def get_stop_token(model_name):
+ """
+ Get the stop token according to the model name / type.
+
+ Parameters:
+ model_name (str): The name of the model used to determine its type.
+ """
+ # If there is a stop token
+ try:
+ # Get the model type
+ model_type = model_types[model_name]
+ # Get the template for the model type
+ stop_token = stop_tokens[model_type]
+ except KeyError as e:
+ print(f"Your model {model_name} has no stop token defined. See prompt_management.py. (detail: {e})")
+ return None
+ return stop_token
+
+
+
+
+
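
The new prompt_management.py maps a GGUF file name to a chat-template family and a stop token. The usage sketch below (not part of the diff) assumes aait 2.3.12.2 is installed so the import resolves; the "models/" directory prefix is a placeholder, and the expected output is read off the qwen template defined above.

# Usage sketch for the new module (assumes aait 2.3.12.2 is installed).
from orangecontrib.AAIT.llm import prompt_management

# This file name is listed in model_types, so get_model_type() resolves it to "qwen".
prompt = prompt_management.apply_prompt_template(
    "models/qwen2.5-3b-instruct-q4_k_m.gguf",
    user_prompt="Hello",
    system_prompt="You are helpful.",
)
print(prompt)
# <|im_start|>system
# You are helpful.<|im_end|>
# <|im_start|>user
# Hello<|im_end|>
# <|im_start|>assistant

print(prompt_management.get_stop_token("qwen2.5-3b-instruct-q4_k_m.gguf"))  # <|im_end|>

Unknown file names fall back first to a keyword match in model_keywords and then to the "default" template, which passes the prompts through unchanged.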

9. {aait-2.3.12 → aait-2.3.12.2}/orangecontrib/AAIT/utils/OperationSystem.py +5 -1
@@ -91,4 +91,8 @@ def sleep_seconds(seconds: float):
  """
  if seconds <= 0:
  return
- time.sleep(seconds)
+ time.sleep(seconds)
+
+ def set_proxy(proxy: str):
+ os.environ["http_proxy"] = proxy
+ os.environ["https_proxy"] = proxy
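
The added set_proxy() simply exports http_proxy and https_proxy for the current process. A usage sketch (not part of the diff), assuming the function sits at module level in OperationSystem.py and that os is imported there; the proxy URL is a placeholder.

import os
from orangecontrib.AAIT.utils import OperationSystem

OperationSystem.set_proxy("http://proxy.example.local:8080")  # placeholder URL
# Libraries that honour these environment variables (urllib, requests, ...) now route through the proxy.
print(os.environ["http_proxy"], os.environ["https_proxy"])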