edsl 0.1.38.dev4__py3-none-any.whl → 0.1.39__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- edsl/Base.py +197 -116
- edsl/__init__.py +15 -7
- edsl/__version__.py +1 -1
- edsl/agents/Agent.py +351 -147
- edsl/agents/AgentList.py +211 -73
- edsl/agents/Invigilator.py +101 -50
- edsl/agents/InvigilatorBase.py +62 -70
- edsl/agents/PromptConstructor.py +143 -225
- edsl/agents/QuestionInstructionPromptBuilder.py +128 -0
- edsl/agents/QuestionTemplateReplacementsBuilder.py +137 -0
- edsl/agents/__init__.py +0 -1
- edsl/agents/prompt_helpers.py +3 -3
- edsl/agents/question_option_processor.py +172 -0
- edsl/auto/AutoStudy.py +18 -5
- edsl/auto/StageBase.py +53 -40
- edsl/auto/StageQuestions.py +2 -1
- edsl/auto/utilities.py +0 -6
- edsl/config.py +22 -2
- edsl/conversation/car_buying.py +2 -1
- edsl/coop/CoopFunctionsMixin.py +15 -0
- edsl/coop/ExpectedParrotKeyHandler.py +125 -0
- edsl/coop/PriceFetcher.py +1 -1
- edsl/coop/coop.py +125 -47
- edsl/coop/utils.py +14 -14
- edsl/data/Cache.py +45 -27
- edsl/data/CacheEntry.py +12 -15
- edsl/data/CacheHandler.py +31 -12
- edsl/data/RemoteCacheSync.py +154 -46
- edsl/data/__init__.py +4 -3
- edsl/data_transfer_models.py +2 -1
- edsl/enums.py +27 -0
- edsl/exceptions/__init__.py +50 -50
- edsl/exceptions/agents.py +12 -0
- edsl/exceptions/inference_services.py +5 -0
- edsl/exceptions/questions.py +24 -6
- edsl/exceptions/scenarios.py +7 -0
- edsl/inference_services/AnthropicService.py +38 -19
- edsl/inference_services/AvailableModelCacheHandler.py +184 -0
- edsl/inference_services/AvailableModelFetcher.py +215 -0
- edsl/inference_services/AwsBedrock.py +0 -2
- edsl/inference_services/AzureAI.py +0 -2
- edsl/inference_services/GoogleService.py +7 -12
- edsl/inference_services/InferenceServiceABC.py +18 -85
- edsl/inference_services/InferenceServicesCollection.py +120 -79
- edsl/inference_services/MistralAIService.py +0 -3
- edsl/inference_services/OpenAIService.py +47 -35
- edsl/inference_services/PerplexityService.py +0 -3
- edsl/inference_services/ServiceAvailability.py +135 -0
- edsl/inference_services/TestService.py +11 -10
- edsl/inference_services/TogetherAIService.py +5 -3
- edsl/inference_services/data_structures.py +134 -0
- edsl/jobs/AnswerQuestionFunctionConstructor.py +223 -0
- edsl/jobs/Answers.py +1 -14
- edsl/jobs/FetchInvigilator.py +47 -0
- edsl/jobs/InterviewTaskManager.py +98 -0
- edsl/jobs/InterviewsConstructor.py +50 -0
- edsl/jobs/Jobs.py +356 -431
- edsl/jobs/JobsChecks.py +35 -10
- edsl/jobs/JobsComponentConstructor.py +189 -0
- edsl/jobs/JobsPrompts.py +6 -4
- edsl/jobs/JobsRemoteInferenceHandler.py +205 -133
- edsl/jobs/JobsRemoteInferenceLogger.py +239 -0
- edsl/jobs/RequestTokenEstimator.py +30 -0
- edsl/jobs/async_interview_runner.py +138 -0
- edsl/jobs/buckets/BucketCollection.py +44 -3
- edsl/jobs/buckets/TokenBucket.py +53 -21
- edsl/jobs/buckets/TokenBucketAPI.py +211 -0
- edsl/jobs/buckets/TokenBucketClient.py +191 -0
- edsl/jobs/check_survey_scenario_compatibility.py +85 -0
- edsl/jobs/data_structures.py +120 -0
- edsl/jobs/decorators.py +35 -0
- edsl/jobs/interviews/Interview.py +143 -408
- edsl/jobs/jobs_status_enums.py +9 -0
- edsl/jobs/loggers/HTMLTableJobLogger.py +304 -0
- edsl/jobs/results_exceptions_handler.py +98 -0
- edsl/jobs/runners/JobsRunnerAsyncio.py +88 -403
- edsl/jobs/runners/JobsRunnerStatus.py +133 -165
- edsl/jobs/tasks/QuestionTaskCreator.py +21 -19
- edsl/jobs/tasks/TaskHistory.py +38 -18
- edsl/jobs/tasks/task_status_enum.py +0 -2
- edsl/language_models/ComputeCost.py +63 -0
- edsl/language_models/LanguageModel.py +194 -236
- edsl/language_models/ModelList.py +28 -19
- edsl/language_models/PriceManager.py +127 -0
- edsl/language_models/RawResponseHandler.py +106 -0
- edsl/language_models/ServiceDataSources.py +0 -0
- edsl/language_models/__init__.py +1 -2
- edsl/language_models/key_management/KeyLookup.py +63 -0
- edsl/language_models/key_management/KeyLookupBuilder.py +273 -0
- edsl/language_models/key_management/KeyLookupCollection.py +38 -0
- edsl/language_models/key_management/__init__.py +0 -0
- edsl/language_models/key_management/models.py +131 -0
- edsl/language_models/model.py +256 -0
- edsl/language_models/repair.py +2 -2
- edsl/language_models/utilities.py +5 -4
- edsl/notebooks/Notebook.py +19 -14
- edsl/notebooks/NotebookToLaTeX.py +142 -0
- edsl/prompts/Prompt.py +29 -39
- edsl/questions/ExceptionExplainer.py +77 -0
- edsl/questions/HTMLQuestion.py +103 -0
- edsl/questions/QuestionBase.py +68 -214
- edsl/questions/QuestionBasePromptsMixin.py +7 -3
- edsl/questions/QuestionBudget.py +1 -1
- edsl/questions/QuestionCheckBox.py +3 -3
- edsl/questions/QuestionExtract.py +5 -7
- edsl/questions/QuestionFreeText.py +2 -3
- edsl/questions/QuestionList.py +10 -18
- edsl/questions/QuestionMatrix.py +265 -0
- edsl/questions/QuestionMultipleChoice.py +67 -23
- edsl/questions/QuestionNumerical.py +2 -4
- edsl/questions/QuestionRank.py +7 -17
- edsl/questions/SimpleAskMixin.py +4 -3
- edsl/questions/__init__.py +2 -1
- edsl/questions/{AnswerValidatorMixin.py → answer_validator_mixin.py} +47 -2
- edsl/questions/data_structures.py +20 -0
- edsl/questions/derived/QuestionLinearScale.py +6 -3
- edsl/questions/derived/QuestionTopK.py +1 -1
- edsl/questions/descriptors.py +17 -3
- edsl/questions/loop_processor.py +149 -0
- edsl/questions/{QuestionBaseGenMixin.py → question_base_gen_mixin.py} +57 -50
- edsl/questions/question_registry.py +1 -1
- edsl/questions/{ResponseValidatorABC.py → response_validator_abc.py} +40 -26
- edsl/questions/response_validator_factory.py +34 -0
- edsl/questions/templates/matrix/__init__.py +1 -0
- edsl/questions/templates/matrix/answering_instructions.jinja +5 -0
- edsl/questions/templates/matrix/question_presentation.jinja +20 -0
- edsl/results/CSSParameterizer.py +1 -1
- edsl/results/Dataset.py +170 -7
- edsl/results/DatasetExportMixin.py +168 -305
- edsl/results/DatasetTree.py +28 -8
- edsl/results/MarkdownToDocx.py +122 -0
- edsl/results/MarkdownToPDF.py +111 -0
- edsl/results/Result.py +298 -206
- edsl/results/Results.py +149 -131
- edsl/results/ResultsExportMixin.py +2 -0
- edsl/results/TableDisplay.py +98 -171
- edsl/results/TextEditor.py +50 -0
- edsl/results/__init__.py +1 -1
- edsl/results/file_exports.py +252 -0
- edsl/results/{Selector.py → results_selector.py} +23 -13
- edsl/results/smart_objects.py +96 -0
- edsl/results/table_data_class.py +12 -0
- edsl/results/table_renderers.py +118 -0
- edsl/scenarios/ConstructDownloadLink.py +109 -0
- edsl/scenarios/DocumentChunker.py +102 -0
- edsl/scenarios/DocxScenario.py +16 -0
- edsl/scenarios/FileStore.py +150 -239
- edsl/scenarios/PdfExtractor.py +40 -0
- edsl/scenarios/Scenario.py +90 -193
- edsl/scenarios/ScenarioHtmlMixin.py +4 -3
- edsl/scenarios/ScenarioList.py +415 -244
- edsl/scenarios/ScenarioListExportMixin.py +0 -7
- edsl/scenarios/ScenarioListPdfMixin.py +15 -37
- edsl/scenarios/__init__.py +1 -2
- edsl/scenarios/directory_scanner.py +96 -0
- edsl/scenarios/file_methods.py +85 -0
- edsl/scenarios/handlers/__init__.py +13 -0
- edsl/scenarios/handlers/csv.py +49 -0
- edsl/scenarios/handlers/docx.py +76 -0
- edsl/scenarios/handlers/html.py +37 -0
- edsl/scenarios/handlers/json.py +111 -0
- edsl/scenarios/handlers/latex.py +5 -0
- edsl/scenarios/handlers/md.py +51 -0
- edsl/scenarios/handlers/pdf.py +68 -0
- edsl/scenarios/handlers/png.py +39 -0
- edsl/scenarios/handlers/pptx.py +105 -0
- edsl/scenarios/handlers/py.py +294 -0
- edsl/scenarios/handlers/sql.py +313 -0
- edsl/scenarios/handlers/sqlite.py +149 -0
- edsl/scenarios/handlers/txt.py +33 -0
- edsl/scenarios/{ScenarioJoin.py → scenario_join.py} +10 -6
- edsl/scenarios/scenario_selector.py +156 -0
- edsl/study/ObjectEntry.py +1 -1
- edsl/study/SnapShot.py +1 -1
- edsl/study/Study.py +5 -12
- edsl/surveys/ConstructDAG.py +92 -0
- edsl/surveys/EditSurvey.py +221 -0
- edsl/surveys/InstructionHandler.py +100 -0
- edsl/surveys/MemoryManagement.py +72 -0
- edsl/surveys/Rule.py +5 -4
- edsl/surveys/RuleCollection.py +25 -27
- edsl/surveys/RuleManager.py +172 -0
- edsl/surveys/Simulator.py +75 -0
- edsl/surveys/Survey.py +270 -791
- edsl/surveys/SurveyCSS.py +20 -8
- edsl/surveys/{SurveyFlowVisualizationMixin.py → SurveyFlowVisualization.py} +11 -9
- edsl/surveys/SurveyToApp.py +141 -0
- edsl/surveys/__init__.py +4 -2
- edsl/surveys/descriptors.py +6 -2
- edsl/surveys/instructions/ChangeInstruction.py +1 -2
- edsl/surveys/instructions/Instruction.py +4 -13
- edsl/surveys/instructions/InstructionCollection.py +11 -6
- edsl/templates/error_reporting/interview_details.html +1 -1
- edsl/templates/error_reporting/report.html +1 -1
- edsl/tools/plotting.py +1 -1
- edsl/utilities/PrettyList.py +56 -0
- edsl/utilities/is_notebook.py +18 -0
- edsl/utilities/is_valid_variable_name.py +11 -0
- edsl/utilities/remove_edsl_version.py +24 -0
- edsl/utilities/utilities.py +35 -23
- {edsl-0.1.38.dev4.dist-info → edsl-0.1.39.dist-info}/METADATA +12 -10
- edsl-0.1.39.dist-info/RECORD +358 -0
- {edsl-0.1.38.dev4.dist-info → edsl-0.1.39.dist-info}/WHEEL +1 -1
- edsl/language_models/KeyLookup.py +0 -30
- edsl/language_models/registry.py +0 -190
- edsl/language_models/unused/ReplicateBase.py +0 -83
- edsl/results/ResultsDBMixin.py +0 -238
- edsl-0.1.38.dev4.dist-info/RECORD +0 -277
- /edsl/questions/{RegisterQuestionsMeta.py → register_questions_meta.py} +0 -0
- /edsl/results/{ResultsFetchMixin.py → results_fetch_mixin.py} +0 -0
- /edsl/results/{ResultsToolsMixin.py → results_tools_mixin.py} +0 -0
- {edsl-0.1.38.dev4.dist-info → edsl-0.1.39.dist-info}/LICENSE +0 -0
edsl/results/TableDisplay.py
CHANGED
@@ -1,198 +1,125 @@

New contents of the file (the removed lines were the old tabulate/CSS-based rendering, save-to-file, to_pandas/to_list helpers, and the old example() method):

from typing import (
    Protocol,
    List,
    Any,
    Optional,
    TYPE_CHECKING,
    Sequence,
    Union,
    Literal,
)

if TYPE_CHECKING:
    from edsl.results.Dataset import Dataset

from edsl.results.table_data_class import TableData

from edsl.results.table_renderers import DataTablesRenderer, PandasStyleRenderer

Row = Sequence[Union[str, int, float, bool, None]]
TableFormat = Literal[
    "grid", "simple", "pipe", "orgtbl", "rst", "mediawiki", "html", "latex"
]


class TableRenderer(Protocol):
    """Table renderer protocol"""

    def render_html(self, table_data: TableData) -> str:
        pass


# Modified TableDisplay class
class TableDisplay:
    def __init__(
        self,
        headers: Sequence[str],
        data: Sequence[Row],
        tablefmt: Optional[TableFormat] = None,
        raw_data_set: "Dataset" = None,
        renderer_class: Optional[TableRenderer] = None,
    ):
        assert len(headers) == len(data[0])  # Check if headers and data are consistent

        self.headers = headers
        self.data = data
        self.tablefmt = tablefmt
        self.raw_data_set = raw_data_set

        self.renderer_class = renderer_class or PandasStyleRenderer

        # Handle printing parameters from raw_data_set
        if hasattr(raw_data_set, "print_parameters"):
            self.printing_parameters = (
                raw_data_set.print_parameters if raw_data_set.print_parameters else {}
            )
        else:
            self.printing_parameters = {}

    def _repr_html_(self) -> str:
        table_data = TableData(
            headers=self.headers,
            data=self.data,
            parameters=self.printing_parameters,
            raw_data_set=self.raw_data_set,
        )
        return self.renderer_class(table_data).render_html()

    def __repr__(self):
        from tabulate import tabulate

        return tabulate(self.data, headers=self.headers, tablefmt=self.tablefmt)

    @classmethod
    def from_dictionary(
        cls,
        dictionary: dict,
        tablefmt: Optional[TableFormat] = None,
        renderer: Optional[TableRenderer] = None,
    ) -> "TableDisplay":
        headers = list(dictionary.keys())
        data = [list(dictionary.values())]
        return cls(headers, data, tablefmt, renderer_class=renderer)

    @classmethod
    def from_dictionary_wide(
        cls,
        dictionary: dict,
        tablefmt: Optional[TableFormat] = None,
        renderer: Optional[TableRenderer] = None,
    ) -> "TableDisplay":
        headers = ["key", "value"]
        data = [[k, v] for k, v in dictionary.items()]
        return cls(headers, data, tablefmt, renderer_class=renderer)

    @classmethod
    def from_dataset(
        cls,
        dataset: "Dataset",
        tablefmt: Optional[TableFormat] = None,
        renderer: Optional[TableRenderer] = None,
    ) -> "TableDisplay":
        headers, data = dataset._tabular()
        return cls(headers, data, tablefmt, dataset, renderer_class=renderer)

    def long(self) -> "TableDisplay":
        """Convert to long format"""
        new_header = ["row", "key", "value"]
        new_data = []
        for index, row in enumerate(self.data):
            new_data.extend([[index, k, v] for k, v in zip(self.headers, row)])
        return TableDisplay(
            new_header, new_data, self.tablefmt, renderer_class=self.renderer_class
        )


# Example usage:
if __name__ == "__main__":
    headers = ["Name", "Age", "City"]
    data = [["John", 30, "New York"], ["Jane", 25, "London"]]

    # Using default (Pandas) renderer
    table1 = TableDisplay(headers, data)

    # Using DataTables renderer
    table2 = TableDisplay(headers, data, renderer=DataTablesRenderer())
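A minimal usage sketch of the renderer-based TableDisplay API above (illustrative, not taken from the package; the module and class names come from the diff, the sample values do not, and it assumes the renderer classes behave as their names suggest):

```python
# Hypothetical usage of the new TableDisplay; names come from the diff above,
# the sample dictionary values are made up.
from edsl.results.TableDisplay import TableDisplay
from edsl.results.table_renderers import DataTablesRenderer

table = TableDisplay.from_dictionary_wide(
    {"model": "gpt-4o", "temperature": 0.7}, tablefmt="grid"
)
print(table)                # plain-text table via tabulate (__repr__)
html = table._repr_html_()  # HTML via the default PandasStyleRenderer

# from_dictionary_wide forwards `renderer` as renderer_class, so passing a
# renderer *class* swaps the HTML backend.
alt = TableDisplay.from_dictionary_wide({"model": "gpt-4o"}, renderer=DataTablesRenderer)
```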
edsl/results/TextEditor.py
ADDED
@@ -0,0 +1,50 @@

try:
    import gradio as gr
except ImportError:
    print("Gradio is not installed. Please install it using `pip install gradio`")

import time


class TextEditor:
    def __init__(self, initial_text=""):
        self.text = initial_text
        self._text_saved = False

    def save_text(self, new_text):
        self.text = new_text
        self._text_saved = True
        return "Text saved successfully!"

    def edit_gui(self):
        js_code = """
        async (text) => {
            await navigator.clipboard.writeText(text);
            return "Copied to clipboard!";
        }
        """

        with gr.Blocks() as interface:
            text_area = gr.Textbox(
                value=self.text, lines=10, label="Edit Text", placeholder="Type here..."
            )

            with gr.Row():
                save_btn = gr.Button("Save")
                copy_btn = gr.Button("Copy to Clipboard")

            output = gr.Textbox(label="Status")

            save_btn.click(fn=self.save_text, inputs=[text_area], outputs=[output])

            # Add copy functionality
            copy_btn.click(
                fn=None, inputs=text_area, outputs=output, api_name=False, js=js_code
            )

        interface.launch(share=False, prevent_thread_lock=True)

        while not self._text_saved:
            time.sleep(0.1)

        return self.text
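For context, a short sketch of how this new TextEditor might be driven (illustrative only; it assumes gradio is installed and a local browser session is available):

```python
# Hypothetical usage of the TextEditor added above. edit_gui() launches a local
# Gradio UI and blocks (polling _text_saved) until the Save button is pressed.
from edsl.results.TextEditor import TextEditor

editor = TextEditor("Draft text to revise")
final_text = editor.edit_gui()  # returns the edited text once Save is clicked
print(final_text)
```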
edsl/results/__init__.py
CHANGED
@@ -1,2 +1,2 @@
-from edsl.results.Result import Result
+# from edsl.results.Result import Result
 from edsl.results.Results import Results
edsl/results/file_exports.py
ADDED
@@ -0,0 +1,252 @@

from abc import ABC, abstractmethod
import io
import csv
import base64
from typing import Optional, Union, Tuple, List, Any, Dict
from openpyxl import Workbook

from edsl.scenarios.FileStore import FileStore


class FileExport(ABC):
    def __init__(
        self,
        data: Any,
        filename: Optional[str] = None,
        remove_prefix: bool = False,
        pretty_labels: Optional[Dict[str, str]] = None,
    ):
        self.data = data
        self.filename = filename  # or self._get_default_filename()
        self.remove_prefix = remove_prefix
        self.pretty_labels = pretty_labels

    @property
    def mime_type(self) -> str:
        """Return the MIME type for this export format."""
        return self.__class__.mime_type

    @property
    def suffix(self) -> str:
        """Return the file suffix for this format."""
        return self.__class__.suffix

    @property
    def is_binary(self) -> bool:
        """Whether the format is binary or text-based."""
        return self.__class__.is_binary

    def _get_default_filename(self) -> str:
        """Generate default filename for this format."""
        return f"results.{self.suffix}"

    def _create_filestore(self, data: Union[str, bytes]) -> "FileStore":
        """Create a FileStore instance with encoded data."""
        if isinstance(data, str):
            base64_string = base64.b64encode(data.encode()).decode()
        else:
            base64_string = base64.b64encode(data).decode()

        from edsl.scenarios.FileStore import FileStore

        path = self.filename or self._get_default_filename()

        fs = FileStore(
            path=path,
            mime_type=self.mime_type,
            binary=self.is_binary,
            suffix=self.suffix,
            base64_string=base64_string,
        )

        if self.filename is not None:
            fs.write(self.filename)
            return None
        return fs

    @abstractmethod
    def format_data(self) -> Union[str, bytes]:
        """Convert the input data to the target format."""
        pass

    def export(self) -> Optional["FileStore"]:
        """Export the data to a FileStore instance."""
        formatted_data = self.format_data()
        return self._create_filestore(formatted_data)


class JSONLExport(FileExport):
    mime_type = "application/jsonl"
    suffix = "jsonl"
    is_binary = False

    def format_data(self) -> str:
        output = io.StringIO()
        for entry in self.data:
            key, values = list(entry.items())[0]
            output.write(f'{{"{key}": {values}}}\n')
        return output.getvalue()


class TabularExport(FileExport, ABC):
    """Base class for exports that use tabular data."""

    def __init__(self, *args, **kwargs):
        super().__init__(*args, **kwargs)
        self.header, self.rows = self.data._get_tabular_data(
            remove_prefix=self.remove_prefix, pretty_labels=self.pretty_labels
        )


class CSVExport(TabularExport):
    mime_type = "text/csv"
    suffix = "csv"
    is_binary = False

    def format_data(self) -> str:
        output = io.StringIO()
        writer = csv.writer(output)
        writer.writerow(self.header)
        writer.writerows(self.rows)
        return output.getvalue()


class ExcelExport(TabularExport):
    mime_type = "application/vnd.openxmlformats-officedocument.spreadsheetml.sheet"
    suffix = "xlsx"
    is_binary = True

    def __init__(self, *args, sheet_name: Optional[str] = None, **kwargs):
        super().__init__(*args, **kwargs)
        self.sheet_name = sheet_name or "Results"

    def format_data(self) -> bytes:
        wb = Workbook()
        ws = wb.active
        ws.title = self.sheet_name

        # Write header
        for col, value in enumerate(self.header, 1):
            ws.cell(row=1, column=col, value=value)

        # Write data rows
        for row_idx, row_data in enumerate(self.rows, 2):
            for col, value in enumerate(row_data, 1):
                ws.cell(row=row_idx, column=col, value=value)

        # Save to bytes buffer
        buffer = io.BytesIO()
        wb.save(buffer)
        buffer.seek(0)
        return buffer.getvalue()


import sqlite3
from typing import Any


class SQLiteExport(TabularExport):
    mime_type = "application/x-sqlite3"
    suffix = "db"
    is_binary = True

    def __init__(
        self, *args, table_name: str = "results", if_exists: str = "replace", **kwargs
    ):
        """
        Initialize SQLite export.

        Args:
            table_name: Name of the table to create
            if_exists: How to handle existing table ('fail', 'replace', or 'append')
        """
        super().__init__(*args, **kwargs)
        self.table_name = table_name
        self.if_exists = if_exists

    def _get_column_types(self) -> list[tuple[str, str]]:
        """Infer SQL column types from the data."""
        column_types = []

        # Check first row of data for types
        if self.rows:
            first_row = self.rows[0]
            for header, value in zip(self.header, first_row):
                if isinstance(value, bool):
                    sql_type = "BOOLEAN"
                elif isinstance(value, int):
                    sql_type = "INTEGER"
                elif isinstance(value, float):
                    sql_type = "REAL"
                else:
                    sql_type = "TEXT"
                column_types.append((header, sql_type))
        else:
            # If no data, default to TEXT
            column_types = [(header, "TEXT") for header in self.header]

        return column_types

    def _create_table(self, cursor: sqlite3.Cursor) -> None:
        """Create the table with appropriate schema."""
        column_types = self._get_column_types()

        # Drop existing table if replace mode
        if self.if_exists == "replace":
            cursor.execute(f"DROP TABLE IF EXISTS {self.table_name}")
        elif self.if_exists == "fail":
            cursor.execute(
                f"SELECT name FROM sqlite_master WHERE type='table' AND name=?",
                (self.table_name,),
            )
            if cursor.fetchone():
                raise ValueError(f"Table {self.table_name} already exists")

        # Create table
        columns = ", ".join(f'"{col}" {dtype}' for col, dtype in column_types)
        create_table_sql = f"""
            CREATE TABLE IF NOT EXISTS {self.table_name} (
                {columns}
            )
        """
        cursor.execute(create_table_sql)

    def format_data(self) -> bytes:
        """Convert the data to a SQLite database file."""
        buffer = io.BytesIO()

        # Create in-memory database
        conn = sqlite3.connect(":memory:")
        cursor = conn.cursor()

        # Create table and insert data
        self._create_table(cursor)

        # Prepare placeholders for INSERT
        placeholders = ",".join(["?" for _ in self.header])
        insert_sql = f"INSERT INTO {self.table_name} ({','.join(self.header)}) VALUES ({placeholders})"

        # Insert data
        cursor.executemany(insert_sql, self.rows)
        conn.commit()

        # Save to file buffer
        conn.backup(sqlite3.connect(buffer))
        conn.close()

        buffer.seek(0)
        return buffer.getvalue()

    def _validate_params(self) -> None:
        """Validate initialization parameters."""
        valid_if_exists = {"fail", "replace", "append"}
        if self.if_exists not in valid_if_exists:
            raise ValueError(
                f"if_exists must be one of {valid_if_exists}, got {self.if_exists}"
            )

        # Validate table name (basic SQLite identifier validation)
        if not self.table_name.isalnum() and not all(c in "_" for c in self.table_name):
            raise ValueError(
                f"Invalid table name: {self.table_name}. Must contain only alphanumeric characters and underscores."
            )
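A self-contained sketch of how the new exporters compose. In the package the `data` argument is an edsl Results/Dataset object; the stand-in class below exists only to satisfy the `_get_tabular_data` hook and is not part of edsl:

```python
# Hypothetical example: any object exposing _get_tabular_data(remove_prefix=...,
# pretty_labels=...) can drive the tabular exporters defined above.
from edsl.results.file_exports import CSVExport


class StubTabularData:  # illustrative stand-in, not an edsl class
    def _get_tabular_data(self, remove_prefix=False, pretty_labels=None):
        return ["answer.color", "answer.size"], [["red", "small"], ["blue", "large"]]


fs = CSVExport(data=StubTabularData()).export()                 # returns a FileStore
CSVExport(data=StubTabularData(), filename="out.csv").export()  # writes out.csv, returns None
```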
edsl/results/{Selector.py → results_selector.py}
CHANGED
@@ -1,7 +1,12 @@
-from typing import Union, List, Dict, Any
+from typing import Union, List, Dict, Any, Optional
+import sys
 from collections import defaultdict
 from edsl.results.Dataset import Dataset
 
+from edsl.exceptions.results import ResultsColumnNotFoundError
+
+from edsl.utilities.is_notebook import is_notebook
+
 
 class Selector:
     def __init__(
@@ -19,11 +24,17 @@ class Selector:
         self._fetch_list = fetch_list_func
         self.columns = columns
 
-    def select(self, *columns: Union[str, List[str]]) ->
+    def select(self, *columns: Union[str, List[str]]) -> Optional[Dataset]:
+        try:
+            columns = self._normalize_columns(columns)
+            to_fetch = self._get_columns_to_fetch(columns)
+            new_data = self._fetch_data(to_fetch)
+        except ResultsColumnNotFoundError as e:
+            if is_notebook():
+                print("Error:", e, file=sys.stderr)
+                return None
+            else:
+                raise e
         return Dataset(new_data)
 
     def _normalize_columns(self, columns: Union[str, List[str]]) -> tuple:
@@ -63,17 +74,16 @@ class Selector:
             search_in_list = self.columns
         else:
             search_in_list = [s.split(".")[1] for s in self.columns]
-        # breakpoint()
         matches = [s for s in search_in_list if s.startswith(partial_name)]
         return [partial_name] if partial_name in matches else matches
 
     def _validate_matches(self, column: str, matches: List[str]):
         if len(matches) > 1:
-            raise
+            raise ResultsColumnNotFoundError(
                 f"Column '{column}' is ambiguous. Did you mean one of {matches}?"
             )
         if len(matches) == 0 and ".*" not in column:
-            raise
+            raise ResultsColumnNotFoundError(f"Column '{column}' not found in data.")
 
     def _parse_column(self, column: str) -> tuple[str, str]:
         if "." in column:
@@ -89,11 +99,11 @@ class Selector:
         close_matches = difflib.get_close_matches(column, self._key_to_data_type.keys())
         if close_matches:
             suggestions = ", ".join(close_matches)
-            raise
+            raise ResultsColumnNotFoundError(
                 f"Column '{column}' not found in data. Did you mean: {suggestions}?"
             )
         else:
-            raise
+            raise ResultsColumnNotFoundError(f"Column {column} not found in data")
 
     def _process_column(self, data_type: str, key: str, to_fetch: Dict[str, List[str]]):
         data_types = self._get_data_types_to_return(data_type)
@@ -108,13 +118,13 @@ class Selector:
             self.items_in_order.append(f"{dt}.{k}")
 
         if not found_once:
-            raise
+            raise ResultsColumnNotFoundError(f"Key {key} not found in data.")
 
     def _get_data_types_to_return(self, parsed_data_type: str) -> List[str]:
         if parsed_data_type == "*":
             return self.known_data_types
         if parsed_data_type not in self.known_data_types:
-            raise
+            raise ResultsColumnNotFoundError(
                 f"Data type {parsed_data_type} not found in data. Did you mean one of {self.known_data_types}"
             )
         return [parsed_data_type]
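Finally, a sketch of the user-visible effect of the Selector change: unknown columns now raise `ResultsColumnNotFoundError`, except in a notebook, where `select` prints the error and returns None. The `Results.example()` entry point below is an assumption, not something shown in this diff:

```python
# Hypothetical: exercising the new error type from results_selector.py.
from edsl import Results
from edsl.exceptions.results import ResultsColumnNotFoundError

results = Results.example()  # assumed sample-results constructor
try:
    results.select("no_such_column")
except ResultsColumnNotFoundError as err:
    print("Unknown column:", err)  # in a notebook, select() would return None instead
```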