edsl 0.1.36.dev3__py3-none-any.whl → 0.1.36.dev4__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
edsl/__version__.py CHANGED
@@ -1 +1 @@
- __version__ = "0.1.36.dev3"
+ __version__ = "0.1.36.dev4"
edsl/coop/coop.py CHANGED
@@ -6,6 +6,7 @@ from typing import Any, Optional, Union, Literal
  from uuid import UUID
  import edsl
  from edsl import CONFIG, CacheEntry, Jobs, Survey
+ from edsl.exceptions.coop import CoopNoUUIDError, CoopServerResponseError
  from edsl.coop.utils import (
      EDSLObject,
      ObjectRegistry,
@@ -99,7 +100,7 @@ class Coop:
          if "Authorization" in message:
              print(message)
              message = "Please provide an Expected Parrot API key."
-         raise Exception(message)
+         raise CoopServerResponseError(message)

      def _json_handle_none(self, value: Any) -> Any:
          """
@@ -116,7 +117,7 @@ class Coop:
          Resolve the uuid from a uuid or a url.
          """
          if not url and not uuid:
-             raise Exception("No uuid or url provided for the object.")
+             raise CoopNoUUIDError("No uuid or url provided for the object.")
          if not uuid and url:
              uuid = url.split("/")[-1]
          return uuid
edsl/data/RemoteCacheSync.py ADDED
@@ -0,0 +1,84 @@
+ class RemoteCacheSync:
+     def __init__(self, coop, cache, output_func, remote_cache=True, remote_cache_description=""):
+         self.coop = coop
+         self.cache = cache
+         self._output = output_func
+         self.remote_cache = remote_cache
+         self.old_entry_keys = []
+         self.new_cache_entries = []
+         self.remote_cache_description = remote_cache_description
+
+     def __enter__(self):
+         if self.remote_cache:
+             self._sync_from_remote()
+         self.old_entry_keys = list(self.cache.keys())
+         return self
+
+     def __exit__(self, exc_type, exc_value, traceback):
+         if self.remote_cache:
+             self._sync_to_remote()
+         return False # Propagate exceptions
+
+     def _sync_from_remote(self):
+         cache_difference = self.coop.remote_cache_get_diff(self.cache.keys())
+         client_missing_cacheentries = cache_difference.get("client_missing_cacheentries", [])
+         missing_entry_count = len(client_missing_cacheentries)
+
+         if missing_entry_count > 0:
+             self._output(
+                 f"Updating local cache with {missing_entry_count:,} new "
+                 f"{'entry' if missing_entry_count == 1 else 'entries'} from remote..."
+             )
+             self.cache.add_from_dict({entry.key: entry for entry in client_missing_cacheentries})
+             self._output("Local cache updated!")
+         else:
+             self._output("No new entries to add to local cache.")
+
+     def _sync_to_remote(self):
+         cache_difference = self.coop.remote_cache_get_diff(self.cache.keys())
+         server_missing_cacheentry_keys = cache_difference.get("server_missing_cacheentry_keys", [])
+         server_missing_cacheentries = [
+             entry
+             for key in server_missing_cacheentry_keys
+             if (entry := self.cache.data.get(key)) is not None
+         ]
+
+         new_cache_entries = [
+             entry for entry in self.cache.values() if entry.key not in self.old_entry_keys
+         ]
+         server_missing_cacheentries.extend(new_cache_entries)
+         new_entry_count = len(server_missing_cacheentries)
+
+         if new_entry_count > 0:
+             self._output(
+                 f"Updating remote cache with {new_entry_count:,} new "
+                 f"{'entry' if new_entry_count == 1 else 'entries'}..."
+             )
+             self.coop.remote_cache_create_many(
+                 server_missing_cacheentries,
+                 visibility="private",
+                 description=self.remote_cache_description,
+             )
+             self._output("Remote cache updated!")
+         else:
+             self._output("No new entries to add to remote cache.")
+
+         self._output(f"There are {len(self.cache.keys()):,} entries in the local cache.")
+
+ # # Usage example
+ # def run_job(self, n, progress_bar, cache, stop_on_exception, sidecar_model, print_exceptions, raise_validation_errors, use_remote_cache=True):
+ #     with RemoteCacheSync(self.coop, cache, self._output, remote_cache=use_remote_cache):
+ #         self._output("Running job...")
+ #         results = self._run_local(
+ #             n=n,
+ #             progress_bar=progress_bar,
+ #             cache=cache,
+ #             stop_on_exception=stop_on_exception,
+ #             sidecar_model=sidecar_model,
+ #             print_exceptions=print_exceptions,
+ #             raise_validation_errors=raise_validation_errors,
+ #         )
+ #         self._output("Job completed!")
+
+ #         results.cache = cache.new_entries_cache()
+ #         return results
edsl/exceptions/coop.py CHANGED
@@ -1,2 +1,10 @@
  class CoopErrors(Exception):
      pass
+
+
+ class CoopNoUUIDError(CoopErrors):
+     pass
+
+
+ class CoopServerResponseError(CoopErrors):
+     pass
edsl/jobs/Jobs.py CHANGED
@@ -1,8 +1,10 @@
  # """The Jobs class is a collection of agents, scenarios and models and one survey."""
  from __future__ import annotations
  import warnings
+ import requests
  from itertools import product
  from typing import Optional, Union, Sequence, Generator
+
  from edsl.Base import Base
  from edsl.exceptions import MissingAPIKeyError
  from edsl.jobs.buckets.BucketCollection import BucketCollection
@@ -10,6 +12,9 @@ from edsl.jobs.interviews.Interview import Interview
  from edsl.jobs.runners.JobsRunnerAsyncio import JobsRunnerAsyncio
  from edsl.utilities.decorators import add_edsl_version, remove_edsl_version

+ from edsl.data.RemoteCacheSync import RemoteCacheSync
+ from edsl.exceptions.coop import CoopServerResponseError
+

  class Jobs(Base):
      """
@@ -203,10 +208,6 @@ class Jobs(Base):
              ]
          )
          return d
-         # if table:
-         #     d.to_scenario_list().print(format="rich")
-         # else:
-         #     return d

      def show_prompts(self) -> None:
          """Print the prompts."""
@@ -615,7 +616,7 @@ class Jobs(Base):

      def _output(self, message) -> None:
          """Check if a Job is verbose. If so, print the message."""
-         if self.verbose:
+         if hasattr(self, "verbose") and self.verbose:
              print(message)

      def _check_parameters(self, strict=False, warn=False) -> None:
@@ -692,6 +693,122 @@ class Jobs(Base):
              return False
          return self._raise_validation_errors

+     def create_remote_inference_job(
+         self, iterations: int = 1, remote_inference_description: Optional[str] = None
+     ):
+         """ """
+         from edsl.coop.coop import Coop
+
+         coop = Coop()
+         self._output("Remote inference activated. Sending job to server...")
+         remote_job_creation_data = coop.remote_inference_create(
+             self,
+             description=remote_inference_description,
+             status="queued",
+             iterations=iterations,
+         )
+         return remote_job_creation_data
+
+     @staticmethod
+     def check_status(job_uuid):
+         from edsl.coop.coop import Coop
+
+         coop = Coop()
+         return coop.remote_inference_get(job_uuid)
+
+     def poll_remote_inference_job(
+         self, remote_job_creation_data: dict
+     ) -> Union[Results, None]:
+         from edsl.coop.coop import Coop
+         import time
+         from datetime import datetime
+         from edsl.config import CONFIG
+
+         expected_parrot_url = CONFIG.get("EXPECTED_PARROT_URL")
+
+         job_uuid = remote_job_creation_data.get("uuid")
+
+         coop = Coop()
+         job_in_queue = True
+         while job_in_queue:
+             remote_job_data = coop.remote_inference_get(job_uuid)
+             status = remote_job_data.get("status")
+             if status == "cancelled":
+                 print("\r" + " " * 80 + "\r", end="")
+                 print("Job cancelled by the user.")
+                 print(
+                     f"See {expected_parrot_url}/home/remote-inference for more details."
+                 )
+                 return None
+             elif status == "failed":
+                 print("\r" + " " * 80 + "\r", end="")
+                 print("Job failed.")
+                 print(
+                     f"See {expected_parrot_url}/home/remote-inference for more details."
+                 )
+                 return None
+             elif status == "completed":
+                 results_uuid = remote_job_data.get("results_uuid")
+                 results = coop.get(results_uuid, expected_object_type="results")
+                 print("\r" + " " * 80 + "\r", end="")
+                 print(
+                     f"Job completed and Results stored on Coop (Results uuid={results_uuid})."
+                 )
+                 return results
+             else:
+                 duration = 5
+                 time_checked = datetime.now().strftime("%Y-%m-%d %I:%M:%S %p")
+                 frames = ["⠋", "⠙", "⠹", "⠸", "⠼", "⠴", "⠦", "⠧", "⠇", "⠏"]
+                 start_time = time.time()
+                 i = 0
+                 while time.time() - start_time < duration:
+                     print(
+                         f"\r{frames[i % len(frames)]} Job status: {status} - last update: {time_checked}",
+                         end="",
+                         flush=True,
+                     )
+                     time.sleep(0.1)
+                     i += 1
+
+     def use_remote_inference(self, disable_remote_inference: bool):
+         if disable_remote_inference:
+             return False
+         if not disable_remote_inference:
+             try:
+                 from edsl import Coop
+
+                 user_edsl_settings = Coop().edsl_settings
+                 return user_edsl_settings.get("remote_inference", False)
+             except requests.ConnectionError:
+                 pass
+             except CoopServerResponseError as e:
+                 pass
+
+         return False
+
+     def use_remote_cache(self):
+         try:
+             from edsl import Coop
+
+             user_edsl_settings = Coop().edsl_settings
+             return user_edsl_settings.get("remote_caching", False)
+         except requests.ConnectionError:
+             pass
+         except CoopServerResponseError as e:
+             pass
+
+         return False
+
+     def check_api_keys(self):
+         from edsl import Model
+
+         for model in self.models + [Model()]:
+             if not model.has_valid_api_key():
+                 raise MissingAPIKeyError(
+                     model_name=str(model.model),
+                     inference_service=model._inference_service_,
+                 )
+
      def run(
          self,
          n: int = 1,
@@ -729,91 +846,17 @@ class Jobs(Base):

          self.verbose = verbose

-         remote_cache = False
-         remote_inference = False
-
-         if not disable_remote_inference:
-             try:
-                 coop = Coop()
-                 user_edsl_settings = Coop().edsl_settings
-                 remote_cache = user_edsl_settings.get("remote_caching", False)
-                 remote_inference = user_edsl_settings.get("remote_inference", False)
-             except Exception:
-                 pass
-
-         if remote_inference:
-             import time
-             from datetime import datetime
-             from edsl.config import CONFIG
-
-             expected_parrot_url = CONFIG.get("EXPECTED_PARROT_URL")
-
-             self._output("Remote inference activated. Sending job to server...")
-             if remote_cache:
-                 self._output(
-                     "Remote caching activated. The remote cache will be used for this job."
-                 )
-
-             remote_job_creation_data = coop.remote_inference_create(
-                 self,
-                 description=remote_inference_description,
-                 status="queued",
-                 iterations=n,
+         if remote_inference := self.use_remote_inference(disable_remote_inference):
+             remote_job_creation_data = self.create_remote_inference_job(
+                 iterations=n, remote_inference_description=remote_inference_description
              )
-             time_queued = datetime.now().strftime("%m/%d/%Y %I:%M:%S %p")
-             job_uuid = remote_job_creation_data.get("uuid")
-             print(f"Remote inference started (Job uuid={job_uuid}).")
-             # print(f"Job queued at {time_queued}.")
-             job_in_queue = True
-             while job_in_queue:
-                 remote_job_data = coop.remote_inference_get(job_uuid)
-                 status = remote_job_data.get("status")
-                 if status == "cancelled":
-                     print("\r" + " " * 80 + "\r", end="")
-                     print("Job cancelled by the user.")
-                     print(
-                         f"See {expected_parrot_url}/home/remote-inference for more details."
-                     )
-                     return None
-                 elif status == "failed":
-                     print("\r" + " " * 80 + "\r", end="")
-                     print("Job failed.")
-                     print(
-                         f"See {expected_parrot_url}/home/remote-inference for more details."
-                     )
-                     return None
-                 elif status == "completed":
-                     results_uuid = remote_job_data.get("results_uuid")
-                     results = coop.get(results_uuid, expected_object_type="results")
-                     print("\r" + " " * 80 + "\r", end="")
-                     print(
-                         f"Job completed and Results stored on Coop (Results uuid={results_uuid})."
-                     )
-                     return results
-                 else:
-                     duration = 5
-                     time_checked = datetime.now().strftime("%Y-%m-%d %I:%M:%S %p")
-                     frames = ["⠋", "⠙", "⠹", "⠸", "⠼", "⠴", "⠦", "⠧", "⠇", "⠏"]
-                     start_time = time.time()
-                     i = 0
-                     while time.time() - start_time < duration:
-                         print(
-                             f"\r{frames[i % len(frames)]} Job status: {status} - last update: {time_checked}",
-                             end="",
-                             flush=True,
-                         )
-                         time.sleep(0.1)
-                         i += 1
-         else:
-             if check_api_keys:
-                 from edsl import Model
+             results = self.poll_remote_inference_job(remote_job_creation_data)
+             if results is None:
+                 self._output("Job failed.")
+             return results

-                 for model in self.models + [Model()]:
-                     if not model.has_valid_api_key():
-                         raise MissingAPIKeyError(
-                             model_name=str(model.model),
-                             inference_service=model._inference_service_,
-                         )
+         if check_api_keys:
+             self.check_api_keys()

          # handle cache
          if cache is None or cache is True:
@@ -825,7 +868,14 @@ class Jobs(Base):

          cache = Cache()

-         if not remote_cache:
+         remote_cache = self.use_remote_cache()
+         with RemoteCacheSync(
+             coop=Coop(),
+             cache=cache,
+             output_func=self._output,
+             remote_cache=remote_cache,
+             remote_cache_description=remote_cache_description,
+         ) as r:
              results = self._run_local(
                  n=n,
                  progress_bar=progress_bar,
@@ -836,75 +886,7 @@ class Jobs(Base):
                  raise_validation_errors=raise_validation_errors,
              )

-             results.cache = cache.new_entries_cache()
-
-             self._output(f"There are {len(cache.keys()):,} entries in the local cache.")
-         else:
-             cache_difference = coop.remote_cache_get_diff(cache.keys())
-
-             client_missing_cacheentries = cache_difference.get(
-                 "client_missing_cacheentries", []
-             )
-
-             missing_entry_count = len(client_missing_cacheentries)
-             if missing_entry_count > 0:
-                 self._output(
-                     f"Updating local cache with {missing_entry_count:,} new "
-                     f"{'entry' if missing_entry_count == 1 else 'entries'} from remote..."
-                 )
-                 cache.add_from_dict(
-                     {entry.key: entry for entry in client_missing_cacheentries}
-                 )
-                 self._output("Local cache updated!")
-             else:
-                 self._output("No new entries to add to local cache.")
-
-             server_missing_cacheentry_keys = cache_difference.get(
-                 "server_missing_cacheentry_keys", []
-             )
-             server_missing_cacheentries = [
-                 entry
-                 for key in server_missing_cacheentry_keys
-                 if (entry := cache.data.get(key)) is not None
-             ]
-             old_entry_keys = [key for key in cache.keys()]
-
-             self._output("Running job...")
-             results = self._run_local(
-                 n=n,
-                 progress_bar=progress_bar,
-                 cache=cache,
-                 stop_on_exception=stop_on_exception,
-                 sidecar_model=sidecar_model,
-                 print_exceptions=print_exceptions,
-                 raise_validation_errors=raise_validation_errors,
-             )
-             self._output("Job completed!")
-
-             new_cache_entries = list(
-                 [entry for entry in cache.values() if entry.key not in old_entry_keys]
-             )
-             server_missing_cacheentries.extend(new_cache_entries)
-
-             new_entry_count = len(server_missing_cacheentries)
-             if new_entry_count > 0:
-                 self._output(
-                     f"Updating remote cache with {new_entry_count:,} new "
-                     f"{'entry' if new_entry_count == 1 else 'entries'}..."
-                 )
-                 coop.remote_cache_create_many(
-                     server_missing_cacheentries,
-                     visibility="private",
-                     description=remote_cache_description,
-                 )
-                 self._output("Remote cache updated!")
-             else:
-                 self._output("No new entries to add to remote cache.")
-
-             results.cache = cache.new_entries_cache()
-
-             self._output(f"There are {len(cache.keys()):,} entries in the local cache.")
-
+         results.cache = cache.new_entries_cache()
          return results

      def _run_local(self, *args, **kwargs):
edsl/jobs/interviews/Interview.py CHANGED
@@ -159,13 +159,13 @@ class Interview:
          return self.task_creators.interview_status

      # region: Serialization
-     def _to_dict(self, include_exceptions=False) -> dict[str, Any]:
+     def _to_dict(self, include_exceptions=True) -> dict[str, Any]:
          """Return a dictionary representation of the Interview instance.
          This is just for hashing purposes.

          >>> i = Interview.example()
          >>> hash(i)
-         1646262796627658719
+         1217840301076717434
          """
          d = {
              "agent": self.agent._to_dict(),
@@ -177,11 +177,29 @@ class Interview:
          }
          if include_exceptions:
              d["exceptions"] = self.exceptions.to_dict()
+         return d
+
+     @classmethod
+     def from_dict(cls, d: dict[str, Any]) -> "Interview":
+         """Return an Interview instance from a dictionary."""
+         agent = Agent.from_dict(d["agent"])
+         survey = Survey.from_dict(d["survey"])
+         scenario = Scenario.from_dict(d["scenario"])
+         model = LanguageModel.from_dict(d["model"])
+         iteration = d["iteration"]
+         return cls(agent=agent, survey=survey, scenario=scenario, model=model, iteration=iteration)

      def __hash__(self) -> int:
          from edsl.utilities.utilities import dict_hash

-         return dict_hash(self._to_dict())
+         return dict_hash(self._to_dict(include_exceptions=False))
+
+     def __eq__(self, other: "Interview") -> bool:
+         """
+         >>> from edsl.jobs.interviews.Interview import Interview; i = Interview.example(); d = i._to_dict(); i2 = Interview.from_dict(d); i == i2
+         True
+         """
+         return hash(self) == hash(other)

      # endregion

edsl/jobs/interviews/InterviewExceptionCollection.py CHANGED
@@ -33,6 +33,15 @@ class InterviewExceptionCollection(UserDict):
          """Return the collection of exceptions as a dictionary."""
          newdata = {k: [e.to_dict() for e in v] for k, v in self.data.items()}
          return newdata
+
+     @classmethod
+     def from_dict(cls, data: dict) -> "InterviewExceptionCollection":
+         """Create an InterviewExceptionCollection from a dictionary."""
+         collection = cls()
+         for question_name, entries in data.items():
+             for entry in entries:
+                 collection.add(question_name, InterviewExceptionEntry.from_dict(entry))
+         return collection

      def _repr_html_(self) -> str:
          from edsl.utilities.utilities import data_to_html
edsl/jobs/interviews/InterviewExceptionEntry.py CHANGED
@@ -9,7 +9,6 @@ class InterviewExceptionEntry:
          self,
          *,
          exception: Exception,
-         # failed_question: FailedQuestion,
          invigilator: "Invigilator",
          traceback_format="text",
          answers=None,
@@ -133,22 +132,41 @@ class InterviewExceptionEntry:
          )
          console.print(tb)
          return html_output.getvalue()
+
+     @staticmethod
+     def serialize_exception(exception: Exception) -> dict:
+         return {
+             "type": type(exception).__name__,
+             "message": str(exception),
+             "traceback": "".join(traceback.format_exception(type(exception), exception, exception.__traceback__)),
+         }
+
+     @staticmethod
+     def deserialize_exception(data: dict) -> Exception:
+         exception_class = globals()[data["type"]]
+         return exception_class(data["message"])

      def to_dict(self) -> dict:
          """Return the exception as a dictionary.

          >>> entry = InterviewExceptionEntry.example()
-         >>> entry.to_dict()['exception']
-         ValueError()
-
+         >>> _ = entry.to_dict()
          """
          return {
-             "exception": self.exception,
+             "exception": self.serialize_exception(self.exception),
              "time": self.time,
              "traceback": self.traceback,
-             # "failed_question": self.failed_question.to_dict(),
              "invigilator": self.invigilator.to_dict(),
          }
+
+     @classmethod
+     def from_dict(cls, data: dict) -> "InterviewExceptionEntry":
+         """Create an InterviewExceptionEntry from a dictionary."""
+         from edsl.agents.Invigilator import InvigilatorAI
+
+         exception = cls.deserialize_exception(data["exception"])
+         invigilator = InvigilatorAI.from_dict(data["invigilator"])
+         return cls(exception=exception, invigilator=invigilator)

      def push(self):
          from edsl import Coop
edsl/jobs/runners/JobsRunnerAsyncio.py CHANGED
@@ -1,18 +1,12 @@
  from __future__ import annotations
  import time
- import math
  import asyncio
- import functools
  import threading
  from typing import Coroutine, List, AsyncGenerator, Optional, Union, Generator
  from contextlib import contextmanager
  from collections import UserList

- from rich.live import Live
- from rich.console import Console
-
  from edsl.results.Results import Results
- from edsl import shared_globals
  from edsl.jobs.interviews.Interview import Interview
  from edsl.jobs.runners.JobsRunnerStatus import JobsRunnerStatus

@@ -25,7 +19,6 @@ from edsl.results.Results import Results
  from edsl.language_models.LanguageModel import LanguageModel
  from edsl.data.Cache import Cache

-
  class StatusTracker(UserList):
      def __init__(self, total_tasks: int):
          self.total_tasks = total_tasks
@@ -48,8 +41,6 @@ class JobsRunnerAsyncio:
          self.bucket_collection: "BucketCollection" = jobs.bucket_collection
          self.total_interviews: List["Interview"] = []

-         # self.jobs_runner_status = JobsRunnerStatus(self, n=1)
-
      async def run_async_generator(
          self,
          cache: Cache,
@@ -228,17 +219,16 @@ class JobsRunnerAsyncio:
          }
          interview_hashes = list(interview_lookup.keys())

+         task_history = TaskHistory(self.total_interviews, include_traceback=False)
+
          results = Results(
              survey=self.jobs.survey,
              data=sorted(
                  raw_results, key=lambda x: interview_hashes.index(x.interview_hash)
              ),
+             task_history=task_history,
+             cache=cache,
          )
-         results.cache = cache
-         results.task_history = TaskHistory(
-             self.total_interviews, include_traceback=False
-         )
-         results.has_unfixed_exceptions = results.task_history.has_unfixed_exceptions
          results.bucket_collection = self.bucket_collection

          if results.has_unfixed_exceptions and print_exceptions:
@@ -266,6 +256,7 @@ class JobsRunnerAsyncio:
              except Exception as e:
                  print(e)
                  remote_logging = False
+
              if remote_logging:
                  filestore = HTMLFileStore(filepath)
                  coop_details = filestore.push(description="Error report")
edsl/jobs/tasks/TaskHistory.py CHANGED
@@ -8,7 +8,7 @@ from edsl.jobs.tasks.task_status_enum import TaskStatus


  class TaskHistory:
-     def __init__(self, interviews: List["Interview"], include_traceback=False):
+     def __init__(self, interviews: List["Interview"], include_traceback:bool=False):
          """
          The structure of a TaskHistory exception

@@ -25,6 +25,8 @@ class TaskHistory:

      @classmethod
      def example(cls):
+         """
+         """
          from edsl.jobs.interviews.Interview import Interview

          from edsl.jobs.Jobs import Jobs
@@ -72,13 +74,27 @@ class TaskHistory:

      def to_dict(self):
          """Return the TaskHistory as a dictionary."""
+         # return {
+         #     "exceptions": [
+         #         e.to_dict(include_traceback=self.include_traceback)
+         #         for e in self.exceptions
+         #     ],
+         #     "indices": self.indices,
+         # }
          return {
-             "exceptions": [
-                 e.to_dict(include_traceback=self.include_traceback)
-                 for e in self.exceptions
-             ],
-             "indices": self.indices,
+             'interviews': [i._to_dict() for i in self.total_interviews],
+             'include_traceback': self.include_traceback
          }
+
+     @classmethod
+     def from_dict(cls, data: dict):
+         """Create a TaskHistory from a dictionary."""
+         if data is None:
+             return cls([], include_traceback=False)
+
+         from edsl.jobs.interviews.Interview import Interview
+         interviews = [Interview.from_dict(i) for i in data['interviews']]
+         return cls(interviews, include_traceback=data['include_traceback'])

      @property
      def has_exceptions(self) -> bool:
@@ -259,7 +275,6 @@ class TaskHistory:
                  question_type = interview.survey.get_question(
                      question_name
                  ).question_type
-                 # breakpoint()
                  if (question_name, question_type) not in exceptions_by_question_name:
                      exceptions_by_question_name[(question_name, question_type)] = 0
                  exceptions_by_question_name[(question_name, question_type)] += len(
edsl/results/Results.py CHANGED
@@ -29,6 +29,7 @@ from edsl.results.ResultsFetchMixin import ResultsFetchMixin
  from edsl.utilities.decorators import add_edsl_version, remove_edsl_version
  from edsl.utilities.utilities import dict_hash

+
  from edsl.Base import Base


@@ -89,6 +90,7 @@ class Results(UserList, Mixins, Base):
          cache: Optional["Cache"] = None,
          job_uuid: Optional[str] = None,
          total_results: Optional[int] = None,
+         task_history: Optional["TaskHistory"] = None,
      ):
          """Instantiate a `Results` object with a survey and a list of `Result` objects.

@@ -100,6 +102,7 @@ class Results(UserList, Mixins, Base):
          """
          super().__init__(data)
          from edsl.data.Cache import Cache
+         from edsl.jobs.tasks.TaskHistory import TaskHistory

          self.survey = survey
          self.created_columns = created_columns or []
@@ -107,6 +110,8 @@ class Results(UserList, Mixins, Base):
          self._total_results = total_results
          self.cache = cache or Cache()

+         self.task_history = task_history or TaskHistory(interviews = [])
+
          if hasattr(self, "_add_output_functions"):
              self._add_output_functions()

@@ -276,6 +281,7 @@ class Results(UserList, Mixins, Base):
              "survey": self.survey.to_dict(),
              "created_columns": self.created_columns,
              "cache": Cache() if not hasattr(self, "cache") else self.cache.to_dict(),
+             "task_history": self.task_history.to_dict(),
          }

      def compare(self, other_results):
@@ -295,6 +301,10 @@ class Results(UserList, Mixins, Base):
              "b_not_a": [other_results[i] for i in indices_other],
          }

+     @property
+     def has_unfixed_exceptions(self):
+         return self.task_history.has_unfixed_exceptions
+
      @add_edsl_version
      def to_dict(self) -> dict[str, Any]:
          """Convert the Results object to a dictionary.
@@ -305,7 +315,7 @@ class Results(UserList, Mixins, Base):

          >>> r = Results.example()
          >>> r.to_dict().keys()
-         dict_keys(['data', 'survey', 'created_columns', 'cache', 'edsl_version', 'edsl_class_name'])
+         dict_keys(['data', 'survey', 'created_columns', 'cache', 'task_history', 'edsl_version', 'edsl_class_name'])
          """
          return self._to_dict()

@@ -358,6 +368,7 @@ class Results(UserList, Mixins, Base):
          """
          from edsl import Survey, Cache
          from edsl.results.Result import Result
+         from edsl.jobs.tasks.TaskHistory import TaskHistory

          try:
              results = cls(
@@ -367,6 +378,7 @@ class Results(UserList, Mixins, Base):
                  cache=(
                      Cache.from_dict(data.get("cache")) if "cache" in data else Cache()
                  ),
+                 task_history=TaskHistory.from_dict(data.get("task_history")),
              )
          except Exception as e:
              raise ResultsDeserializationError(f"Error in Results.from_dict: {e}")
edsl-0.1.36.dev3.dist-info/METADATA → edsl-0.1.36.dev4.dist-info/METADATA CHANGED
@@ -1,6 +1,6 @@
  Metadata-Version: 2.1
  Name: edsl
- Version: 0.1.36.dev3
+ Version: 0.1.36.dev4
  Summary: Create and analyze LLM-based surveys
  Home-page: https://www.expectedparrot.com/
  License: MIT
edsl-0.1.36.dev3.dist-info/RECORD → edsl-0.1.36.dev4.dist-info/RECORD CHANGED
@@ -2,7 +2,7 @@ edsl/Base.py,sha256=wdFpHWlQlGNL4XfOmYA0AK9YupMDxK3G7mDHCQp60o4,9295
  edsl/BaseDiff.py,sha256=RoVEh52UJs22yMa7k7jv8se01G62jJNWnBzaZngo-Ug,8260
  edsl/TemplateLoader.py,sha256=sDBlSMt7EfOduM7w3h6v03gvh_Rzn9hVrlS-iLSQdZA,849
  edsl/__init__.py,sha256=UZcx9RHSi3Dslh2lWvCOeppdMW9Xzw_YLs-kFaNW1MU,1770
- edsl/__version__.py,sha256=O6OeyPKUHgURPf_T_wkVQhQyXsqbu_Q16mVBZIJ86uM,28
+ edsl/__version__.py,sha256=HBMCjiG9XIFS4hsly_bM8VF16Mlu-TRk3FhZ60DtWQE,28
  edsl/agents/Agent.py,sha256=dG3SbCm4IpHpObcWm-OejfYHtVXa5NlxGKYKOc-dUxQ,29311
  edsl/agents/AgentList.py,sha256=qo8VK3Ov0YOSbsBcHmlwLZBH81CcDfy5IEcx9AVH78M,10963
  edsl/agents/Invigilator.py,sha256=m4T-z4aNCGd4LKjLXVNI2VszYW-pQeScfcFAxkb0pWc,9080
@@ -46,11 +46,12 @@ edsl/conversation/mug_negotiation.py,sha256=mjvAqErD4AjN3G2za2c-X-3axOShW-zAJUei
  edsl/conversation/next_speaker_utilities.py,sha256=bqr5JglCd6bdLc9IZ5zGOAsmN2F4ERiubSMYvZIG7qk,3629
  edsl/coop/PriceFetcher.py,sha256=pCCWBqFnSv8iYpgQKhAzVCdan1xTCNesZgmIB34N4HY,1770
  edsl/coop/__init__.py,sha256=4iZCwJSzJVyjBYk8ggGxY2kZjq9dXVT1jhyPDNyew4I,115
- edsl/coop/coop.py,sha256=cN2lhj-pFucOB8KUoZRInu_65w3WBaXLcpg_zC6N-vM,28647
+ edsl/coop/coop.py,sha256=Q0bcCiHkVC2lrwCoWigHr1SYeW92PydQDXRBY1V1tks,28741
  edsl/coop/utils.py,sha256=UZwljKYW_Yjw7RYcjOg3SW7fn1pyHQfJ1fM48TBNoss,3601
  edsl/data/Cache.py,sha256=jDt0LoZjLpGnM8-CraQEcsQaVg--U3BiBR1zHj0nDn8,16536
  edsl/data/CacheEntry.py,sha256=_5UiFaJQu_U-Z1_lEPt-h6Gaidp2Eunk02wOd3Ni3MQ,7252
  edsl/data/CacheHandler.py,sha256=DxbfeT2nZGRu8yQkbWr2tyEnhNiClevMsd5KZMCq2f0,4793
+ edsl/data/RemoteCacheSync.py,sha256=V3Eznr1bCtSs0gnjdc_emmHND7l3fiK9samyPAVb6bo,3528
  edsl/data/SQLiteDict.py,sha256=V5Nfnxctgh4Iblqcw1KmbnkjtfmWrrombROSQ3mvg6A,8979
  edsl/data/__init__.py,sha256=KBNGGEuGHq--D-TlpAQmvv_If906dJc1Gsy028zOx78,170
  edsl/data/orm.py,sha256=Jz6rvw5SrlxwysTL0QI9r68EflKxeEBmf6j6himHDS8,238
@@ -59,7 +60,7 @@ edsl/enums.py,sha256=Z6nhaP8p3z0UJSfsCGb6VQUtGUKw3AK6yC0UDwOi05c,5247
  edsl/exceptions/__init__.py,sha256=HVg-U-rJ0fRoG9Rws6gnK5S9B68SkPWDPsoD6KpMZ-A,1370
  edsl/exceptions/agents.py,sha256=3SORFwFbMGrF6-vAL2GrKEVdPcXo7md_k2oYufnVXHA,673
  edsl/exceptions/configuration.py,sha256=qH2sInNTndKlCLAaNgaXHyRFdKQHL7-dElB_j8wz9g4,351
- edsl/exceptions/coop.py,sha256=xDr7k_Tir6L5AxO6GMmoFyUjZ3DIenPQflpUkaTqJl0,38
+ edsl/exceptions/coop.py,sha256=xunPPrnbcNHn60wnH-Qw0rC_Ey99X_N7HnOBF8BQg7E,138
  edsl/exceptions/data.py,sha256=K24CjgwFiMWxrF1Z2dF6F7Vfrge_y9kMK_wsYYSaroU,209
  edsl/exceptions/general.py,sha256=zAyJnppPjjxQAn6X3A5fetmv5FUR7kQDU58vwBKvAks,1114
  edsl/exceptions/jobs.py,sha256=sSUATmzBIN1oINWuwPExxPqIWmfCo0XYj_yR4dJzVjo,803
@@ -87,26 +88,26 @@ edsl/inference_services/rate_limits_cache.py,sha256=HYslviz7mxF9U4CUTPAkoyBsiXjS
  edsl/inference_services/registry.py,sha256=Fn6va65MqD9lnFvT603ZnU7Ok8IW64M2MzOH57kf9-A,1240
  edsl/inference_services/write_available.py,sha256=NNwhATlaMp8IYY635MSx-oYxt5X15acjAfaqYCo_I1Y,285
  edsl/jobs/Answers.py,sha256=c4LpigQjdnMr7iJu8571C4FggGPVudfT7hbJgmgKW40,1821
- edsl/jobs/Jobs.py,sha256=CgZ3oVI4StzoQsjXp8K7k3cZbo_jf-DH0kAbn-LECxo,41466
+ edsl/jobs/Jobs.py,sha256=mZKyTxlcOgnB23JN6xVS4DiAJx1eDMJL7vMEF8D2PqU,39869
  edsl/jobs/__init__.py,sha256=aKuAyd_GoalGj-k7djOoVwEbFUE2XLPlikXaA1_8yAg,32
  edsl/jobs/buckets/BucketCollection.py,sha256=11CRisE1WAPcAlI3YJK3DVvu0AqSvv8KskXo4Q1waSk,2286
  edsl/jobs/buckets/ModelBuckets.py,sha256=hxw_tzc0V42CiB7mh5jIxlgwDVJ-zFZhlLtKrHEg8ho,2419
  edsl/jobs/buckets/TokenBucket.py,sha256=7fG4omzTcj5xC2iJLO9bfBkdAGs6Y3weXzlA3BgPr0E,9090
- edsl/jobs/interviews/Interview.py,sha256=epaxVw53gplPpDpj1n_ahL3Pibs8vqCasF9QjXve1do,24963
- edsl/jobs/interviews/InterviewExceptionCollection.py,sha256=Ez8BCZUD3odqoY9h-gzYKKM8yaHynQ-eYw2uMDh7t98,3279
- edsl/jobs/interviews/InterviewExceptionEntry.py,sha256=2ZQOrhoHvfZJx9p357BQUZCfMXGkuU4xITYLyFGK9sg,4806
+ edsl/jobs/interviews/Interview.py,sha256=nsDxbMF0iOEYpgXcmzKVwAtkvarvsWeSsr1rhUTaIak,25755
+ edsl/jobs/interviews/InterviewExceptionCollection.py,sha256=HRhxuwR_CQXs22yKm1PCpbv3pgh5t0UTBRbdFhODRM0,3670
+ edsl/jobs/interviews/InterviewExceptionEntry.py,sha256=vqtVnT35wUVMwc8YfVhoOgyCKCjpiBdyPHPd-PWpZJY,5589
  edsl/jobs/interviews/InterviewStatistic.py,sha256=hY5d2EkIJ96NilPpZAvZZzZoxLXM7ss3xx5MIcKtTPs,1856
  edsl/jobs/interviews/InterviewStatisticsCollection.py,sha256=_ZZ0fnZBQiIywP9Q_wWjpWhlfcPe2cn32GKut10t5RI,788
  edsl/jobs/interviews/InterviewStatusDictionary.py,sha256=MSyys4hOWe1d8gfsUvAPbcKrs8YiPnz8jpufBSJL7SU,2485
  edsl/jobs/interviews/InterviewStatusLog.py,sha256=6u0F8gf5tha39VQL-IK_QPkCsQAYVOx_IesX7TDDX_A,3252
  edsl/jobs/interviews/ReportErrors.py,sha256=RSzDU2rWwtjfztj7sqaMab0quCiY-X2bG3AEOxhTim8,1745
  edsl/jobs/interviews/interview_status_enum.py,sha256=KJ-1yLAHdX-p8TiFnM0M3v1tnBwkq4aMCuBX6-ytrI8,229
- edsl/jobs/runners/JobsRunnerAsyncio.py,sha256=urlD6tRQ9iI2k23sod9C1tKFOM0Y7B9Mcp31VChcIB4,13011
+ edsl/jobs/runners/JobsRunnerAsyncio.py,sha256=6i9X8zDfl0cXWtVAZDzph0Ei-RIUHOsqsq3mtQNQ6D8,12744
  edsl/jobs/runners/JobsRunnerStatus.py,sha256=4eCh9sRpswGdKeSMW9pCGCAjJZa-OrWUPI7tsxIy_g4,12112
  edsl/jobs/runners/JobsRunnerStatusData.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
  edsl/jobs/tasks/QuestionTaskCreator.py,sha256=K-xATHIXMWPTMOnms5UDW30eTIlIfebf7oOEfwrh1ME,10072
  edsl/jobs/tasks/TaskCreators.py,sha256=XqAbNU33378Z4PQncokbfJwnKt3KHR9aqa5fKYRDpfg,2694
- edsl/jobs/tasks/TaskHistory.py,sha256=xRSo22ipyUSJ15w_k2Jc3dZ04VLwft8zfvm3smAIYrA,14227
+ edsl/jobs/tasks/TaskHistory.py,sha256=9HgXsggiuo77TX8HLXMG3NVeupzS8qLSf-lZPGlps0s,14796
  edsl/jobs/tasks/TaskStatusLog.py,sha256=bqH36a32F12fjX-M-4lNOhHaK2-WLFzKE-r0PxZPRjI,546
  edsl/jobs/tasks/task_status_enum.py,sha256=DOyrz61YlIS8R1W7izJNphcLrJ7I_ReUlfdRmk23h0Q,5333
  edsl/jobs/tokens/InterviewTokenUsage.py,sha256=u_6-IHpGFwZ6qMEXr24-jyLVUSSp4dSs_4iAZsBv7O4,1100
@@ -203,7 +204,7 @@ edsl/results/Dataset.py,sha256=XeCWNcni1rde9iVzmC1WTIne2cip4-f2gQL5iaJfXNw,9202
  edsl/results/DatasetExportMixin.py,sha256=-YR-UeuIW_8u0a8HnQ9R6V41DxCq22_AlsD48fXv0sw,25890
  edsl/results/DatasetTree.py,sha256=nwEgnWBqRXUxagSCEgqwikmIo8ztUxaF-QH-m-8myyQ,4985
  edsl/results/Result.py,sha256=85TlWtcNwCc98N-w3JF0APIkq5LmHfB8cXyW1T5s3f8,15576
- edsl/results/Results.py,sha256=zp4yDt-rPqgEPpv2xCGppQIzzwiKX-BbUpnJtY739L4,40807
+ edsl/results/Results.py,sha256=XdPN_RCpYaQ00SWUdzuxFvReVv8__q-oq87-3Du_szY,41317
  edsl/results/ResultsDBMixin.py,sha256=Hc08aOiArBf9jbxI5uV4VL4wT6BLOkaaEgTMb3zyTUI,7922
  edsl/results/ResultsExportMixin.py,sha256=XizBsPNxziyffirMA4kS7UHpYM1WIE4s1K-B7TqTfDw,1266
  edsl/results/ResultsFetchMixin.py,sha256=VEa0TKDcXbnTinSKs9YaE4WjOSLmlp9Po1_9kklFvSo,848
@@ -272,7 +273,7 @@ edsl/utilities/interface.py,sha256=AaKpWiwWBwP2swNXmnFlIf3ZFsjfsR5bjXQAW47tD-8,1
  edsl/utilities/repair_functions.py,sha256=tftmklAqam6LOQQu_-9U44N-llycffhW8LfO63vBmNw,929
  edsl/utilities/restricted_python.py,sha256=5-_zUhrNbos7pLhDl9nr8d24auRlquR6w-vKkmNjPiA,2060
  edsl/utilities/utilities.py,sha256=gqMtWWNEZkWLiRR9vHW-VRNy2bStEPlJ-I2aK9CwFiQ,11367
- edsl-0.1.36.dev3.dist-info/LICENSE,sha256=_qszBDs8KHShVYcYzdMz3HNMtH-fKN_p5zjoVAVumFc,1111
- edsl-0.1.36.dev3.dist-info/METADATA,sha256=wpLmlLTVi16NQ3gIq388xFmlvYOBnc_iR8zpOUDDiRI,4476
- edsl-0.1.36.dev3.dist-info/WHEEL,sha256=sP946D7jFCHeNz5Iq4fL4Lu-PrWrFsgfLXbbkciIZwg,88
- edsl-0.1.36.dev3.dist-info/RECORD,,
+ edsl-0.1.36.dev4.dist-info/LICENSE,sha256=_qszBDs8KHShVYcYzdMz3HNMtH-fKN_p5zjoVAVumFc,1111
+ edsl-0.1.36.dev4.dist-info/METADATA,sha256=htWeZ0pTPW3Lzb4Ygw147ShH5d0Oqlo5WrlYePlLobc,4476
+ edsl-0.1.36.dev4.dist-info/WHEEL,sha256=sP946D7jFCHeNz5Iq4fL4Lu-PrWrFsgfLXbbkciIZwg,88
+ edsl-0.1.36.dev4.dist-info/RECORD,,